nil.hpp

//
// MessagePack for C++ static resolution routine
//
// Copyright (C) 2008-2009 FURUHASHI Sadayuki
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef MSGPACK_TYPE_NIL_HPP
#define MSGPACK_TYPE_NIL_HPP

#include "msgpack/versioning.hpp"
#include "msgpack/adaptor/adaptor_base.hpp"

namespace msgpack {

/// @cond
MSGPACK_API_VERSION_NAMESPACE(v1) {
/// @endcond

namespace type {

struct nil_t { };

#if !defined(MSGPACK_DISABLE_LEGACY_NIL)
typedef nil_t nil;
#endif // !defined(MSGPACK_DISABLE_LEGACY_NIL)

inline bool operator<(nil_t const& lhs, nil_t const& rhs) {
    return &lhs < &rhs;
}

inline bool operator==(nil_t const& lhs, nil_t const& rhs) {
    return &lhs == &rhs;
}

} // namespace type

namespace adaptor {

template <>
struct convert<type::nil_t> {
    msgpack::object const& operator()(msgpack::object const& o, type::nil_t&) const {
        if (o.type != msgpack::type::NIL) { throw msgpack::type_error(); }
        return o;
    }
};

template <>
struct pack<type::nil_t> {
    template <typename Stream>
    msgpack::packer<Stream>& operator()(msgpack::packer<Stream>& o, const type::nil_t&) const {
        o.pack_nil();
        return o;
    }
};

template <>
struct object<type::nil_t> {
    void operator()(msgpack::object& o, type::nil_t) const {
        o.type = msgpack::type::NIL;
    }
};

template <>
struct object_with_zone<type::nil_t> {
    void operator()(msgpack::object::with_zone& o, type::nil_t v) const {
        static_cast<msgpack::object&>(o) << v;
    }
};

} // namespace adaptor

template <>
inline void msgpack::object::as<void>() const {
    msgpack::type::nil_t v;
    convert(v);
}

/// @cond
} // MSGPACK_API_VERSION_NAMESPACE(v1)
/// @endcond

} // namespace msgpack

#endif // MSGPACK_TYPE_NIL_HPP
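Taken together, these four adaptor specializations are what let nil_t travel through msgpack-c's generic entry points. A minimal round-trip sketch, assuming the standard msgpack-c API (msgpack::sbuffer, msgpack::pack, msgpack::unpack); this is an illustration, not part of the header:

#include <msgpack.hpp>
#include <cassert>

int main() {
    msgpack::sbuffer buf;
    // Serializes a single MessagePack nil via adaptor::pack<type::nil_t>.
    msgpack::pack(buf, msgpack::type::nil_t());

    // Deserialize and check the wire type.
    msgpack::object_handle oh = msgpack::unpack(buf.data(), buf.size());
    assert(oh.get().is_nil());

    // adaptor::convert<type::nil_t> throws msgpack::type_error on any other type.
    msgpack::type::nil_t v;
    oh.get().convert(v);
    return 0;
}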
ValueSymbolTable.py

from binding import *
from .Value import ValueSymbolTable, Value
from .ADT.StringRef import StringRef

@ValueSymbolTable
class ValueSymbolTable:
    if LLVM_VERSION >= (3, 3):
        _include_ = 'llvm/IR/ValueSymbolTable.h'
    else:
        _include_ = 'llvm/ValueSymbolTable.h'

    new = Constructor()
    delete = Destructor()

    lookup = Method(ptr(Value), cast(str, StringRef))

    empty = Method(cast(Bool, bool))
    size = Method(cast(Unsigned, int))

    dump = Method(Void)
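The class body above is a declarative spec consumed by llvmpy's binding generator, not ordinary Python: Constructor, Destructor, and Method describe the C++ members to wrap. A hypothetical usage sketch of the resulting wrapper follows; the module.getValueSymbolTable() accessor is assumed here (it mirrors the LLVM C++ accessor of that name) and may not match llvmpy's actual surface:

# Hypothetical sketch: only lookup/empty/size/dump are grounded in the
# binding above; the accessor used to obtain `symtab` is an assumption.
symtab = module.getValueSymbolTable()
main_fn = symtab.lookup('main')      # ptr(Value): a Value* wrapper, possibly null
if not symtab.empty():
    print('symbols:', symtab.size())
    symtab.dump()                    # dumps the table for debugging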
FavoriteImpl.ts

/**
 * Swaggy Jenkins
 * Jenkins API clients generated from Swagger / Open API specification
 *
 * The version of the OpenAPI document: 1.1.2-pre.0
 * Contact: [email protected]
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */
import * as models from './models';

export interface FavoriteImpl {
    _class?: string;
    _links?: models.FavoriteImpllinks;
    item?: models.PipelineImpl;
}
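Because all three properties are optional, any subset forms a valid FavoriteImpl. A small illustrative literal (the _class string and pipeline fields are hypothetical values, not captured Jenkins API output):

// Illustrative only; field values are made up, not real API output.
const favorite: FavoriteImpl = {
    _class: 'io.jenkins.blueocean.service.embedded.rest.FavoriteImpl',
    item: { name: 'my-pipeline' } as models.PipelineImpl,
};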
display_list_builder.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

//! Builds display lists from flows and fragments.
//!
//! Other browser engines sometimes call this "painting", but it is more accurately called display
//! list building, as the actual painting does not happen here—only deciding *what* we're going to
//! paint.

#![deny(unsafe_code)]

use azure::azure_hl::Color;
use block::BlockFlow;
use context::LayoutContext;
use flex::FlexFlow;
use flow::{self, BaseFlow, Flow, IS_ABSOLUTELY_POSITIONED};
use flow_ref;
use fragment::{CoordinateSystem, Fragment, HAS_LAYER, IframeFragmentInfo, ImageFragmentInfo};
use fragment::{ScannedTextFragmentInfo, SpecificFragmentInfo};
use inline::{FIRST_FRAGMENT_OF_ELEMENT, InlineFlow, LAST_FRAGMENT_OF_ELEMENT};
use list_item::ListItemFlow;
use model::{self, MaybeAuto, ToGfxMatrix};
use table_cell::CollapsedBordersForCell;
use canvas_traits::{CanvasMsg, FromLayoutMsg};
use euclid::Matrix4;
use euclid::{Point2D, Point3D, Rect, Size2D, SideOffsets2D};
use gfx::display_list::{BLUR_INFLATION_FACTOR, BaseDisplayItem, BorderDisplayItem};
use gfx::display_list::{BorderRadii, BoxShadowClipMode, BoxShadowDisplayItem, ClippingRegion};
use gfx::display_list::{DisplayItem, DisplayList, DisplayItemMetadata};
use gfx::display_list::{GradientDisplayItem};
use gfx::display_list::{GradientStop, ImageDisplayItem, LineDisplayItem};
use gfx::display_list::{OpaqueNode, SolidColorDisplayItem};
use gfx::display_list::{StackingContext, TextDisplayItem, TextOrientation};
use gfx::paint_task::THREAD_TINT_COLORS;
use gfx_traits::color;
use ipc_channel::ipc::{self, IpcSharedMemory};
use msg::compositor_msg::{ScrollPolicy, LayerId};
use msg::constellation_msg::ConstellationChan;
use msg::constellation_msg::Msg as ConstellationMsg;
use net_traits::image::base::{Image, PixelFormat};
use net_traits::image_cache_task::UsePlaceholder;
use std::cmp;
use std::default::Default;
use std::f32;
use std::sync::Arc;
use std::sync::mpsc::channel;
use style::computed_values::filter::Filter;
use style::computed_values::{background_attachment, background_clip, background_origin};
use style::computed_values::{background_repeat, background_size};
use style::computed_values::{border_style, image_rendering, overflow_x, position};
use style::computed_values::{visibility, transform, transform_style};
use style::properties::style_structs::Border;
use style::properties::{self, ComputedValues};
use style::values::RGBA;
use style::values::computed;
use style::values::computed::LinearGradient;
use style::values::computed::{LengthOrNone, LengthOrPercentage, LengthOrPercentageOrAuto};
use style::values::specified::{AngleOrCorner, HorizontalDirection, VerticalDirection};
use url::Url;
use util::cursor::Cursor;
use util::geometry::{Au, ZERO_POINT};
use util::logical_geometry::{LogicalPoint, LogicalRect, LogicalSize, WritingMode};
use util::opts;

/// The fake fragment ID we use to indicate the inner display list for `overflow: scroll`.
///
/// FIXME(pcwalton): This is pretty ugly. Consider modifying `LayerId` somehow.
const FAKE_FRAGMENT_ID_FOR_OVERFLOW_SCROLL: u32 = 1000000;

/// Whether a stacking context needs a layer or not.
pub enum StackingContextLayerNecessity {
    Always(LayerId, ScrollPolicy),
    IfCanvas(LayerId),
}

/// The results of display list building for a single flow.
pub enum DisplayListBuildingResult { None, StackingContext(Arc<StackingContext>), Normal(Box<DisplayList>), } impl DisplayListBuildingResult { /// Adds the display list items contained within this display list building result to the given /// display list, preserving stacking order. If this display list building result does not /// consist of an entire stacking context, it will be emptied. pub fn add_to(&mut self, display_list: &mut DisplayList) { match *self { DisplayListBuildingResult::None => return, DisplayListBuildingResult::StackingContext(ref mut stacking_context) => { display_list.children.push_back((*stacking_context).clone()) } DisplayListBuildingResult::Normal(ref mut source_display_list) => { display_list.append_from(&mut **source_display_list) } } } } pub trait FragmentDisplayListBuilding { /// Adds the display items necessary to paint the background of this fragment to the display /// list if necessary. fn build_display_list_for_background_if_applicable(&self, style: &ComputedValues, display_list: &mut DisplayList, layout_context: &LayoutContext, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion); /// Computes the background size for an image with the given background area according to the /// rules in CSS-BACKGROUNDS § 3.9. fn compute_background_image_size(&self, style: &ComputedValues, bounds: &Rect<Au>, image: &Image) -> Size2D<Au>; /// Adds the display items necessary to paint the background image of this fragment to the /// display list at the appropriate stacking level. fn build_display_list_for_background_image(&self, style: &ComputedValues, display_list: &mut DisplayList, layout_context: &LayoutContext, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion, image_url: &Url); /// Adds the display items necessary to paint the background linear gradient of this fragment /// to the display list at the appropriate stacking level. fn build_display_list_for_background_linear_gradient(&self, display_list: &mut DisplayList, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion, gradient: &LinearGradient, style: &ComputedValues); /// Adds the display items necessary to paint the borders of this fragment to a display list if /// necessary. fn build_display_list_for_borders_if_applicable( &self, style: &ComputedValues, border_painting_mode: BorderPaintingMode, display_list: &mut DisplayList, bounds: &Rect<Au>, level: StackingLevel, clip: &ClippingRegion); /// Adds the display items necessary to paint the outline of this fragment to the display list /// if necessary. fn build_display_list_for_outline_if_applicable(&self, style: &ComputedValues, display_list: &mut DisplayList, bounds: &Rect<Au>, clip: &ClippingRegion); /// Adds the display items necessary to paint the box shadow of this fragment to the display /// list if necessary. fn build_display_list_for_box_shadow_if_applicable(&self, style: &ComputedValues, list: &mut DisplayList, layout_context: &LayoutContext, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion); /// Adds display items necessary to draw debug boxes around a scanned text fragment. fn build_debug_borders_around_text_fragments(&self, style: &ComputedValues, display_list: &mut DisplayList, stacking_relative_border_box: &Rect<Au>, stacking_relative_content_box: &Rect<Au>, text_fragment: &ScannedTextFragmentInfo, clip: &ClippingRegion); /// Adds display items necessary to draw debug boxes around this fragment. 
fn build_debug_borders_around_fragment(&self, display_list: &mut DisplayList, stacking_relative_border_box: &Rect<Au>, clip: &ClippingRegion); /// Adds the display items for this fragment to the given display list. /// /// Arguments: /// /// * `display_list`: The display list to add display items to. /// * `layout_context`: The layout context. /// * `dirty`: The dirty rectangle in the coordinate system of the owning flow. /// * `stacking_relative_flow_origin`: Position of the origin of the owning flow with respect /// to its nearest ancestor stacking context. /// * `relative_containing_block_size`: The size of the containing block that /// `position: relative` makes use of. /// * `clip`: The region to clip the display items to. /// * `stacking_relative_display_port`: The position and size of the display port with respect /// to the nearest ancestor stacking context. fn build_display_list(&mut self, display_list: &mut DisplayList, layout_context: &LayoutContext, stacking_relative_flow_origin: &Point2D<Au>, relative_containing_block_size: &LogicalSize<Au>, relative_containing_block_mode: WritingMode, border_painting_mode: BorderPaintingMode, background_and_border_level: BackgroundAndBorderLevel, clip: &ClippingRegion, stacking_relative_display_port: &Rect<Au>); /// Sends the size and position of this iframe fragment to the constellation. This is out of /// line to guide inlining. fn finalize_position_and_size_of_iframe(&self, iframe_fragment: &IframeFragmentInfo, offset: Point2D<Au>, layout_context: &LayoutContext); /// Returns the appropriate clipping region for descendants of this fragment. fn clipping_region_for_children(&self, current_clip: &ClippingRegion, stacking_relative_border_box: &Rect<Au>, is_absolutely_positioned: bool) -> ClippingRegion; /// Calculates the clipping rectangle for a fragment, taking the `clip` property into account /// per CSS 2.1 § 11.1.2. fn calculate_style_specified_clip(&self, parent_clip: &ClippingRegion, stacking_relative_border_box: &Rect<Au>) -> ClippingRegion; /// Creates the text display item for one text fragment. This can be called multiple times for /// one fragment if there are text shadows. /// /// `shadow_blur_radius` will be `Some` if this is a shadow, even if the blur radius is zero. fn build_display_list_for_text_fragment(&self, display_list: &mut DisplayList, text_fragment: &ScannedTextFragmentInfo, text_color: RGBA, stacking_relative_content_box: &Rect<Au>, shadow_blur_radius: Option<Au>, offset: &Point2D<Au>, clip: &ClippingRegion); /// Creates the display item for a text decoration: underline, overline, or line-through. fn build_display_list_for_text_decoration(&self, display_list: &mut DisplayList, color: &RGBA, stacking_relative_box: &LogicalRect<Au>, clip: &ClippingRegion, blur_radius: Au); /// A helper method that `build_display_list` calls to create per-fragment-type display items. fn build_fragment_type_specific_display_items(&mut self, display_list: &mut DisplayList, stacking_relative_border_box: &Rect<Au>, clip: &ClippingRegion); /// Creates a stacking context for associated fragment. fn create_stacking_context(&self, base_flow: &BaseFlow, display_list: Box<DisplayList>, layout_context: &LayoutContext, needs_layer: StackingContextLayerNecessity, mode: StackingContextCreationMode) -> Arc<StackingContext>; } fn handle_overlapping_radii(size: &Size2D<Au>, radii: &BorderRadii<Au>) -> BorderRadii<Au> { // No two corners' border radii may add up to more than the length of the edge // between them. 
    // To prevent that, all radii are scaled down uniformly.
    fn scale_factor(radius_a: Au, radius_b: Au, edge_length: Au) -> f32 {
        let required = radius_a + radius_b;
        if required <= edge_length {
            1.0
        } else {
            edge_length.to_f32_px() / required.to_f32_px()
        }
    }

    let top_factor = scale_factor(radii.top_left.width, radii.top_right.width, size.width);
    let bottom_factor = scale_factor(radii.bottom_left.width,
                                     radii.bottom_right.width,
                                     size.width);
    let left_factor = scale_factor(radii.top_left.height, radii.bottom_left.height, size.height);
    let right_factor = scale_factor(radii.top_right.height,
                                    radii.bottom_right.height,
                                    size.height);
    let min_factor = top_factor.min(bottom_factor).min(left_factor).min(right_factor);
    if min_factor < 1.0 {
        radii.scale_by(min_factor)
    } else {
        *radii
    }
}

fn build_border_radius(abs_bounds: &Rect<Au>, border_style: &Border) -> BorderRadii<Au> {
    // TODO(cgaebel): Support border radii even in the case of multiple border widths.
    // This is an extension of supporting elliptical radii. For now, all percentage
    // radii will be relative to the width.
    handle_overlapping_radii(&abs_bounds.size, &BorderRadii {
        top_left: model::specified_border_radius(border_style.border_top_left_radius,
                                                 abs_bounds.size.width),
        top_right: model::specified_border_radius(border_style.border_top_right_radius,
                                                  abs_bounds.size.width),
        bottom_right: model::specified_border_radius(border_style.border_bottom_right_radius,
                                                     abs_bounds.size.width),
        bottom_left: model::specified_border_radius(border_style.border_bottom_left_radius,
                                                    abs_bounds.size.width),
    })
}

impl FragmentDisplayListBuilding for Fragment {
    fn build_display_list_for_background_if_applicable(&self,
                                                       style: &ComputedValues,
                                                       display_list: &mut DisplayList,
                                                       layout_context: &LayoutContext,
                                                       level: StackingLevel,
                                                       absolute_bounds: &Rect<Au>,
                                                       clip: &ClippingRegion) {
        // Adjust the clipping region as necessary to account for `border-radius`.
        let border_radii = build_border_radius(absolute_bounds, style.get_border());
        let mut clip = (*clip).clone();
        if !border_radii.is_square() {
            clip = clip.intersect_with_rounded_rect(absolute_bounds, &border_radii)
        }

        // FIXME: This causes a lot of background colors to be displayed when they are clearly not
        // needed. We could use display list optimization to clean this up, but it still seems
        // inefficient. What we really want is something like "nearest ancestor element that
        // doesn't have a fragment".
        let background_color = style.resolve_color(style.get_background().background_color);

        // 'background-clip' determines the area within which the background is painted.
// http://dev.w3.org/csswg/css-backgrounds-3/#the-background-clip let mut bounds = *absolute_bounds; match style.get_background().background_clip { background_clip::T::border_box => {} background_clip::T::padding_box => { let border = style.logical_border_width().to_physical(style.writing_mode); bounds.origin.x = bounds.origin.x + border.left; bounds.origin.y = bounds.origin.y + border.top; bounds.size.width = bounds.size.width - border.horizontal(); bounds.size.height = bounds.size.height - border.vertical(); } background_clip::T::content_box => { let border_padding = self.border_padding.to_physical(style.writing_mode); bounds.origin.x = bounds.origin.x + border_padding.left; bounds.origin.y = bounds.origin.y + border_padding.top; bounds.size.width = bounds.size.width - border_padding.horizontal(); bounds.size.height = bounds.size.height - border_padding.vertical(); } } display_list.push(DisplayItem::SolidColorClass(box SolidColorDisplayItem { base: BaseDisplayItem::new(bounds, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), clip.clone()), color: background_color.to_gfx_color(), }), level); // The background image is painted on top of the background color. // Implements background image, per spec: // http://www.w3.org/TR/CSS21/colors.html#background let background = style.get_background(); match background.background_image.0 { None => {} Some(computed::Image::LinearGradient(ref gradient)) => { self.build_display_list_for_background_linear_gradient(display_list, level, absolute_bounds, &clip, gradient, style) } Some(computed::Image::Url(ref image_url)) => { self.build_display_list_for_background_image(style, display_list, layout_context, level, absolute_bounds, &clip, image_url) } } } fn compute_background_image_size(&self, style: &ComputedValues, bounds: &Rect<Au>, image: &Image) -> Size2D<Au> { // If `image_aspect_ratio` < `bounds_aspect_ratio`, the image is tall; otherwise, it is // wide. 
let image_aspect_ratio = (image.width as f64) / (image.height as f64); let bounds_aspect_ratio = bounds.size.width.to_f64_px() / bounds.size.height.to_f64_px(); let intrinsic_size = Size2D::new(Au::from_px(image.width as i32), Au::from_px(image.height as i32)); match (style.get_background().background_size.clone(), image_aspect_ratio < bounds_aspect_ratio) { (background_size::T::Contain, false) | (background_size::T::Cover, true) => { Size2D::new(bounds.size.width, Au::from_f64_px(bounds.size.width.to_f64_px() / image_aspect_ratio)) } (background_size::T::Contain, true) | (background_size::T::Cover, false) => { Size2D::new(Au::from_f64_px(bounds.size.height.to_f64_px() * image_aspect_ratio), bounds.size.height) } (background_size::T::Explicit(background_size::ExplicitSize { width, height: LengthOrPercentageOrAuto::Auto, }), _) => { let width = MaybeAuto::from_style(width, bounds.size.width) .specified_or_default(intrinsic_size.width); Size2D::new(width, Au::from_f64_px(width.to_f64_px() / image_aspect_ratio)) } (background_size::T::Explicit(background_size::ExplicitSize { width: LengthOrPercentageOrAuto::Auto, height }), _) => { let height = MaybeAuto::from_style(height, bounds.size.height) .specified_or_default(intrinsic_size.height); Size2D::new(Au::from_f64_px(height.to_f64_px() * image_aspect_ratio), height) } (background_size::T::Explicit(background_size::ExplicitSize { width, height }), _) => { Size2D::new(MaybeAuto::from_style(width, bounds.size.width) .specified_or_default(intrinsic_size.width), MaybeAuto::from_style(height, bounds.size.height) .specified_or_default(intrinsic_size.height)) } } } fn build_display_list_for_background_image(&self, style: &ComputedValues, display_list: &mut DisplayList, layout_context: &LayoutContext, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion, image_url: &Url) { let background = style.get_background(); let image = layout_context.get_or_request_image(image_url.clone(), UsePlaceholder::No); if let Some(image) = image { debug!("(building display list) building background image"); // Use `background-size` to get the size. let mut bounds = *absolute_bounds; let image_size = self.compute_background_image_size(style, &bounds, &*image); // Clip. // // TODO: Check the bounds to see if a clip item is actually required. let clip = clip.clone().intersect_rect(&bounds); // Background image should be positioned on the padding box basis. let border = style.logical_border_width().to_physical(style.writing_mode); // Use 'background-origin' to get the origin value. let (mut origin_x, mut origin_y) = match background.background_origin { background_origin::T::padding_box => { (Au(0), Au(0)) } background_origin::T::border_box => { (-border.left, -border.top) } background_origin::T::content_box => { let border_padding = self.border_padding.to_physical(self.style.writing_mode); (border_padding.left - border.left, border_padding.top - border.top) } }; // Use `background-attachment` to get the initial virtual origin let (virtual_origin_x, virtual_origin_y) = match background.background_attachment { background_attachment::T::scroll => { (absolute_bounds.origin.x, absolute_bounds.origin.y) } background_attachment::T::fixed => { // If the ‘background-attachment’ value for this image is ‘fixed’, then // 'background-origin' has no effect. origin_x = Au(0); origin_y = Au(0); (Au(0), Au(0)) } }; // Use `background-position` to get the offset. 
let horizontal_position = model::specified(background.background_position.horizontal, bounds.size.width - image_size.width); let vertical_position = model::specified(background.background_position.vertical, bounds.size.height - image_size.height); let abs_x = border.left + virtual_origin_x + horizontal_position + origin_x; let abs_y = border.top + virtual_origin_y + vertical_position + origin_y; // Adjust origin and size based on background-repeat match background.background_repeat { background_repeat::T::no_repeat => { bounds.origin.x = abs_x; bounds.origin.y = abs_y; bounds.size.width = image_size.width; bounds.size.height = image_size.height; } background_repeat::T::repeat_x => { bounds.origin.y = abs_y; bounds.size.height = image_size.height; ImageFragmentInfo::tile_image(&mut bounds.origin.x, &mut bounds.size.width, abs_x, image_size.width.to_nearest_px() as u32); } background_repeat::T::repeat_y => { bounds.origin.x = abs_x; bounds.size.width = image_size.width; ImageFragmentInfo::tile_image(&mut bounds.origin.y, &mut bounds.size.height, abs_y, image_size.height.to_nearest_px() as u32); } background_repeat::T::repeat => { ImageFragmentInfo::tile_image(&mut bounds.origin.x, &mut bounds.size.width, abs_x, image_size.width.to_nearest_px() as u32); ImageFragmentInfo::tile_image(&mut bounds.origin.y, &mut bounds.size.height, abs_y, image_size.height.to_nearest_px() as u32); } }; // Create the image display item. display_list.push(DisplayItem::ImageClass(box ImageDisplayItem { base: BaseDisplayItem::new(bounds, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), clip), image: image.clone(), stretch_size: Size2D::new(image_size.width, image_size.height), image_rendering: style.get_effects().image_rendering.clone(), }), level); } } fn build_display_list_for_background_linear_gradient(&self, display_list: &mut DisplayList, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion, gradient: &LinearGradient, style: &ComputedValues) { let clip = clip.clone().intersect_rect(absolute_bounds); // This is the distance between the center and the ending point; i.e. half of the distance // between the starting point and the ending point. let delta = match gradient.angle_or_corner { AngleOrCorner::Angle(angle) => { Point2D::new(Au::from_f32_px(angle.radians().sin() * absolute_bounds.size.width.to_f32_px() / 2.0), Au::from_f32_px(-angle.radians().cos() * absolute_bounds.size.height.to_f32_px() / 2.0)) } AngleOrCorner::Corner(horizontal, vertical) => { let x_factor = match horizontal { HorizontalDirection::Left => -1, HorizontalDirection::Right => 1, }; let y_factor = match vertical { VerticalDirection::Top => -1, VerticalDirection::Bottom => 1, }; Point2D::new(absolute_bounds.size.width * x_factor / 2, absolute_bounds.size.height * y_factor / 2) } }; // This is the length of the gradient line. let length = Au::from_f32_px( (delta.x.to_f32_px() * 2.0).hypot(delta.y.to_f32_px() * 2.0)); // Determine the position of each stop per CSS-IMAGES § 3.4. // // FIXME(#3908, pcwalton): Make sure later stops can't be behind earlier stops. let (mut stops, mut stop_run) = (Vec::new(), None); for (i, stop) in gradient.stops.iter().enumerate() { let offset = match stop.position { None => { if stop_run.is_none() { // Initialize a new stop run. let start_offset = if i == 0 { 0.0 } else { // `unwrap()` here should never fail because this is the beginning of // a stop run, which is always bounded by a length or percentage. 
position_to_offset(gradient.stops[i - 1].position.unwrap(), length) }; let (end_index, end_offset) = match gradient.stops[i..] .iter() .enumerate() .find(|&(_, ref stop)| stop.position.is_some()) { None => (gradient.stops.len() - 1, 1.0), Some((end_index, end_stop)) => { // `unwrap()` here should never fail because this is the end of // a stop run, which is always bounded by a length or // percentage. (end_index, position_to_offset(end_stop.position.unwrap(), length)) } }; stop_run = Some(StopRun { start_offset: start_offset, end_offset: end_offset, start_index: i, stop_count: end_index - i, }) } let stop_run = stop_run.unwrap(); let stop_run_length = stop_run.end_offset - stop_run.start_offset; if stop_run.stop_count == 0 { stop_run.end_offset } else { stop_run.start_offset + stop_run_length * (i - stop_run.start_index) as f32 / (stop_run.stop_count as f32) } } Some(position) => { stop_run = None; position_to_offset(position, length) } }; stops.push(GradientStop { offset: offset, color: style.resolve_color(stop.color).to_gfx_color() }) } let center = Point2D::new(absolute_bounds.origin.x + absolute_bounds.size.width / 2, absolute_bounds.origin.y + absolute_bounds.size.height / 2); let gradient_display_item = DisplayItem::GradientClass(box GradientDisplayItem { base: BaseDisplayItem::new(*absolute_bounds, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), clip), start_point: center - delta, end_point: center + delta, stops: stops, }); display_list.push(gradient_display_item, level) } fn build_display_list_for_box_shadow_if_applicable(&self, style: &ComputedValues, list: &mut DisplayList, _layout_context: &LayoutContext, level: StackingLevel, absolute_bounds: &Rect<Au>, clip: &ClippingRegion) { // NB: According to CSS-BACKGROUNDS, box shadows render in *reverse* order (front to back). 
for box_shadow in style.get_effects().box_shadow.0.iter().rev() { let bounds = shadow_bounds(&absolute_bounds.translate(&Point2D::new(box_shadow.offset_x, box_shadow.offset_y)), box_shadow.blur_radius, box_shadow.spread_radius); list.push(DisplayItem::BoxShadowClass(box BoxShadowDisplayItem { base: BaseDisplayItem::new(bounds, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), (*clip).clone()), box_bounds: *absolute_bounds, color: style.resolve_color(box_shadow.color).to_gfx_color(), offset: Point2D::new(box_shadow.offset_x, box_shadow.offset_y), blur_radius: box_shadow.blur_radius, spread_radius: box_shadow.spread_radius, clip_mode: if box_shadow.inset { BoxShadowClipMode::Inset } else { BoxShadowClipMode::Outset }, }), level); } } fn build_display_list_for_borders_if_applicable( &self, style: &ComputedValues, border_painting_mode: BorderPaintingMode, display_list: &mut DisplayList, bounds: &Rect<Au>, level: StackingLevel, clip: &ClippingRegion) { let mut border = style.logical_border_width(); match border_painting_mode { BorderPaintingMode::Separate => {} BorderPaintingMode::Collapse(collapsed_borders) => { collapsed_borders.adjust_border_widths_for_painting(&mut border) } BorderPaintingMode::Hidden => return, } if border.is_zero() { return } let border_style_struct = style.get_border(); let mut colors = SideOffsets2D::new(border_style_struct.border_top_color, border_style_struct.border_right_color, border_style_struct.border_bottom_color, border_style_struct.border_left_color); let mut border_style = SideOffsets2D::new(border_style_struct.border_top_style, border_style_struct.border_right_style, border_style_struct.border_bottom_style, border_style_struct.border_left_style); if let BorderPaintingMode::Collapse(collapsed_borders) = border_painting_mode { collapsed_borders.adjust_border_colors_and_styles_for_painting(&mut colors, &mut border_style, style.writing_mode); } let colors = SideOffsets2D::new(style.resolve_color(colors.top), style.resolve_color(colors.right), style.resolve_color(colors.bottom), style.resolve_color(colors.left)); // If this border collapses, then we draw outside the boundaries we were given. let mut bounds = *bounds; if let BorderPaintingMode::Collapse(collapsed_borders) = border_painting_mode { collapsed_borders.adjust_border_bounds_for_painting(&mut bounds, style.writing_mode) } // Append the border to the display list. display_list.push(DisplayItem::BorderClass(box BorderDisplayItem { base: BaseDisplayItem::new(bounds, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), (*clip).clone()), border_widths: border.to_physical(style.writing_mode), color: SideOffsets2D::new(colors.top.to_gfx_color(), colors.right.to_gfx_color(), colors.bottom.to_gfx_color(), colors.left.to_gfx_color()), style: border_style, radius: build_border_radius(&bounds, border_style_struct), }), level); } fn build_display_list_for_outline_if_applicable(&self, style: &ComputedValues, display_list: &mut DisplayList, bounds: &Rect<Au>, clip: &ClippingRegion) { let width = style.get_outline().outline_width; if width == Au(0) { return } let outline_style = style.get_outline().outline_style; if outline_style == border_style::T::none { return } // Outlines are not accounted for in the dimensions of the border box, so adjust the // absolute bounds. 
let mut bounds = *bounds; let offset = width + style.get_outline().outline_offset; bounds.origin.x = bounds.origin.x - offset; bounds.origin.y = bounds.origin.y - offset; bounds.size.width = bounds.size.width + offset + offset; bounds.size.height = bounds.size.height + offset + offset; // Append the outline to the display list. let color = style.resolve_color(style.get_outline().outline_color).to_gfx_color(); display_list.outlines.push_back(DisplayItem::BorderClass(box BorderDisplayItem { base: BaseDisplayItem::new(bounds, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), (*clip).clone()), border_widths: SideOffsets2D::new_all_same(width), color: SideOffsets2D::new_all_same(color), style: SideOffsets2D::new_all_same(outline_style), radius: Default::default(), })) } fn build_debug_borders_around_text_fragments(&self, style: &ComputedValues, display_list: &mut DisplayList, stacking_relative_border_box: &Rect<Au>, stacking_relative_content_box: &Rect<Au>, text_fragment: &ScannedTextFragmentInfo, clip: &ClippingRegion) { // FIXME(pcwalton, #2795): Get the real container size. let container_size = Size2D::zero(); // Compute the text fragment bounds and draw a border surrounding them. display_list.content.push_back(DisplayItem::BorderClass(box BorderDisplayItem { base: BaseDisplayItem::new(*stacking_relative_border_box, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), (*clip).clone()), border_widths: SideOffsets2D::new_all_same(Au::from_px(1)), color: SideOffsets2D::new_all_same(color::rgb(0, 0, 200)), style: SideOffsets2D::new_all_same(border_style::T::solid), radius: Default::default(), })); // Draw a rectangle representing the baselines. let mut baseline = LogicalRect::from_physical(self.style.writing_mode, *stacking_relative_content_box, container_size); baseline.start.b = baseline.start.b + text_fragment.run.ascent(); baseline.size.block = Au(0); let baseline = baseline.to_physical(self.style.writing_mode, container_size); let line_display_item = box LineDisplayItem { base: BaseDisplayItem::new(baseline, DisplayItemMetadata::new(self.node, style, Cursor::DefaultCursor), (*clip).clone()), color: color::rgb(0, 200, 0), style: border_style::T::dashed, }; display_list.content.push_back(DisplayItem::LineClass(line_display_item)); } fn build_debug_borders_around_fragment(&self, display_list: &mut DisplayList, stacking_relative_border_box: &Rect<Au>, clip: &ClippingRegion) { // This prints a debug border around the border of this fragment. display_list.content.push_back(DisplayItem::BorderClass(box BorderDisplayItem { base: BaseDisplayItem::new(*stacking_relative_border_box, DisplayItemMetadata::new(self.node, &*self.style, Cursor::DefaultCursor), (*clip).clone()), border_widths: SideOffsets2D::new_all_same(Au::from_px(1)), color: SideOffsets2D::new_all_same(color::rgb(0, 0, 200)), style: SideOffsets2D::new_all_same(border_style::T::solid), radius: Default::default(), })); } fn calculate_style_specified_clip(&self, parent_clip: &ClippingRegion, stacking_relative_border_box: &Rect<Au>) -> ClippingRegion { // Account for `clip` per CSS 2.1 § 11.1.2. let style_clip_rect = match (self.style().get_box().position, self.style().get_effects().clip.0) { (position::T::absolute, Some(style_clip_rect)) => style_clip_rect, _ => return (*parent_clip).clone(), }; // FIXME(pcwalton, #2795): Get the real container size. 
let clip_origin = Point2D::new(stacking_relative_border_box.origin.x + style_clip_rect.left, stacking_relative_border_box.origin.y + style_clip_rect.top); let right = style_clip_rect.right.unwrap_or(stacking_relative_border_box.size.width); let bottom = style_clip_rect.bottom.unwrap_or(stacking_relative_border_box.size.height); let clip_size = Size2D::new(right - clip_origin.x, bottom - clip_origin.y); (*parent_clip).clone().intersect_rect(&Rect::new(clip_origin, clip_size)) } fn build_display_list(&mut self, display_list: &mut DisplayList, layout_context: &LayoutContext, stacking_relative_flow_origin: &Point2D<Au>, relative_containing_block_size: &LogicalSize<Au>, relative_containing_block_mode: WritingMode, border_painting_mode: BorderPaintingMode, background_and_border_level: BackgroundAndBorderLevel, clip: &ClippingRegion, stacking_relative_display_port: &Rect<Au>) { if self.style().get_inheritedbox().visibility != visibility::T::visible { return } // Compute the fragment position relative to the parent stacking context. If the fragment // itself establishes a stacking context, then the origin of its position will be (0, 0) // for the purposes of this computation. let stacking_relative_border_box = self.stacking_relative_border_box(stacking_relative_flow_origin, relative_containing_block_size, relative_containing_block_mode, CoordinateSystem::Own); debug!("Fragment::build_display_list at rel={:?}, abs={:?}, flow origin={:?}: {:?}", self.border_box, stacking_relative_border_box, stacking_relative_flow_origin, self); if !stacking_relative_border_box.intersects(stacking_relative_display_port) { debug!("Fragment::build_display_list: outside display port"); return } // Calculate the clip rect. If there's nothing to render at all, don't even construct // display list items. let clip = self.calculate_style_specified_clip(clip, &stacking_relative_border_box); if !clip.might_intersect_rect(&stacking_relative_border_box) { return; } debug!("Fragment::build_display_list: intersected. Adding display item..."); if self.is_primary_fragment() { let level = StackingLevel::from_background_and_border_level(background_and_border_level); // Add shadows, background, borders, and outlines, if applicable. 
if let Some(ref inline_context) = self.inline_context { for node in inline_context.nodes.iter().rev() { self.build_display_list_for_box_shadow_if_applicable( &*node.style, display_list, layout_context, level, &stacking_relative_border_box, &clip); self.build_display_list_for_background_if_applicable( &*node.style, display_list, layout_context, level, &stacking_relative_border_box, &clip); let mut style = node.style.clone(); properties::modify_border_style_for_inline_sides( &mut style, node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT), node.flags.contains(LAST_FRAGMENT_OF_ELEMENT)); self.build_display_list_for_borders_if_applicable( &*style, border_painting_mode, display_list, &stacking_relative_border_box, level, &clip); self.build_display_list_for_outline_if_applicable( &*node.style, display_list, &stacking_relative_border_box, &clip); } } if !self.is_scanned_text_fragment() { self.build_display_list_for_box_shadow_if_applicable(&*self.style, display_list, layout_context, level, &stacking_relative_border_box, &clip); self.build_display_list_for_background_if_applicable(&*self.style, display_list, layout_context, level, &stacking_relative_border_box, &clip); self.build_display_list_for_borders_if_applicable(&*self.style, border_painting_mode, display_list, &stacking_relative_border_box, level, &clip); self.build_display_list_for_outline_if_applicable(&*self.style, display_list, &stacking_relative_border_box, &clip); } } // Create special per-fragment-type display items. self.build_fragment_type_specific_display_items(display_list, &stacking_relative_border_box, &clip); if opts::get().show_debug_fragment_borders { self.build_debug_borders_around_fragment(display_list, &stacking_relative_border_box, &clip) } // If this is an iframe, then send its position and size up to the constellation. // // FIXME(pcwalton): Doing this during display list construction seems potentially // problematic if iframes are outside the area we're computing the display list for, since // they won't be able to reflow at all until the user scrolls to them. Perhaps we should // separate this into two parts: first we should send the size only to the constellation // once that's computed during assign-block-sizes, and second we should should send the // origin to the constellation here during display list construction. This should work // because layout for the iframe only needs to know size, and origin is only relevant if // the iframe is actually going to be displayed. if let SpecificFragmentInfo::Iframe(ref iframe_fragment) = self.specific { let stacking_relative_border_box_in_parent_coordinate_system = self.stacking_relative_border_box(stacking_relative_flow_origin, relative_containing_block_size, relative_containing_block_mode, CoordinateSystem::Parent); self.finalize_position_and_size_of_iframe( &**iframe_fragment, stacking_relative_border_box_in_parent_coordinate_system.origin, layout_context) } } fn build_fragment_type_specific_display_items(&mut self, display_list: &mut DisplayList, stacking_relative_border_box: &Rect<Au>, clip: &ClippingRegion) { // Compute the context box position relative to the parent stacking context. let stacking_relative_content_box = self.stacking_relative_content_box(stacking_relative_border_box); match self.specific { SpecificFragmentInfo::ScannedText(ref text_fragment) => { // Create items for shadows. // // NB: According to CSS-BACKGROUNDS, text shadows render in *reverse* order (front // to back). 
let text_color = self.style().get_color().color; for text_shadow in self.style.get_effects().text_shadow.0.iter().rev() { let offset = &Point2D::new(text_shadow.offset_x, text_shadow.offset_y); let color = self.style().resolve_color(text_shadow.color); self.build_display_list_for_text_fragment(display_list, &**text_fragment, color, &stacking_relative_content_box, Some(text_shadow.blur_radius), offset, clip); } // Create the main text display item. self.build_display_list_for_text_fragment(display_list, &**text_fragment, text_color, &stacking_relative_content_box, None, &Point2D::new(Au(0), Au(0)), clip); if opts::get().show_debug_fragment_borders { self.build_debug_borders_around_text_fragments(self.style(), display_list, stacking_relative_border_box, &stacking_relative_content_box, &**text_fragment, clip) } } SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(..) | SpecificFragmentInfo::Iframe(..) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) => { if opts::get().show_debug_fragment_borders { self.build_debug_borders_around_fragment(display_list, stacking_relative_border_box, clip); } } SpecificFragmentInfo::Image(ref mut image_fragment) => { // Place the image into the display list. if let Some(ref image) = image_fragment.image { display_list.content.push_back(DisplayItem::ImageClass(box ImageDisplayItem { base: BaseDisplayItem::new(stacking_relative_content_box, DisplayItemMetadata::new(self.node, &*self.style, Cursor::DefaultCursor), (*clip).clone()), image: image.clone(), stretch_size: stacking_relative_content_box.size, image_rendering: self.style.get_effects().image_rendering.clone(), })); } } SpecificFragmentInfo::Canvas(ref canvas_fragment_info) => { // TODO(ecoal95): make the canvas with a renderer use the custom layer let width = canvas_fragment_info.replaced_image_fragment_info .computed_inline_size.map_or(0, |w| w.to_px() as usize); let height = canvas_fragment_info.replaced_image_fragment_info .computed_block_size.map_or(0, |h| h.to_px() as usize); let (sender, receiver) = ipc::channel::<IpcSharedMemory>().unwrap(); let canvas_data = match canvas_fragment_info.ipc_renderer { Some(ref ipc_renderer) => { ipc_renderer.lock().unwrap().send(CanvasMsg::FromLayout( FromLayoutMsg::SendPixelContents(sender))).unwrap(); receiver.recv().unwrap() }, None => IpcSharedMemory::from_byte(0xFFu8, width * height * 4), }; display_list.content.push_back(DisplayItem::ImageClass(box ImageDisplayItem { base: BaseDisplayItem::new(stacking_relative_content_box, DisplayItemMetadata::new(self.node, &*self.style, Cursor::DefaultCursor), (*clip).clone()), image: Arc::new(Image { width: width as u32, height: height as u32, format: PixelFormat::RGBA8, bytes: canvas_data, }), stretch_size: stacking_relative_content_box.size, image_rendering: image_rendering::T::Auto, })); } SpecificFragmentInfo::UnscannedText(_) => { panic!("Shouldn't see unscanned fragments here.") } SpecificFragmentInfo::TableColumn(_) => { panic!("Shouldn't see table column fragments here.") } } } fn create_stacking_context(&self, base_flow: &BaseFlow, display_list: Box<DisplayList>, layout_context: &LayoutContext, needs_layer: StackingContextLayerNecessity, mode: StackingContextCreationMode) -> Arc<StackingContext> { let border_box = match mode { StackingContextCreationMode::Normal | 
StackingContextCreationMode::OuterScrollWrapper => { self.stacking_relative_border_box(&base_flow.stacking_relative_position, &base_flow.early_absolute_position_info .relative_containing_block_size, base_flow.early_absolute_position_info .relative_containing_block_mode, CoordinateSystem::Parent) } StackingContextCreationMode::InnerScrollWrapper => { Rect::new(ZERO_POINT, base_flow.overflow.size) } }; let overflow = match mode { StackingContextCreationMode::Normal => { // First, compute the offset of our border box (including relative positioning) // from our flow origin, since that is what `BaseFlow::overflow` is relative to. let border_box_offset = border_box.translate(&-base_flow.stacking_relative_position).origin; // Then, using that, compute our overflow region relative to our border box. base_flow.overflow.translate(&-border_box_offset) } StackingContextCreationMode::InnerScrollWrapper | StackingContextCreationMode::OuterScrollWrapper => { Rect::new(ZERO_POINT, border_box.size) } }; let mut transform = Matrix4::identity(); if let Some(ref operations) = self.style().get_effects().transform.0 { let transform_origin = self.style().get_effects().transform_origin; let transform_origin = Point3D::new(model::specified(transform_origin.horizontal, border_box.size.width).to_f32_px(), model::specified(transform_origin.vertical, border_box.size.height).to_f32_px(), transform_origin.depth.to_f32_px()); let pre_transform = Matrix4::create_translation(transform_origin.x, transform_origin.y, transform_origin.z); let post_transform = Matrix4::create_translation(-transform_origin.x, -transform_origin.y, -transform_origin.z); for operation in operations { let matrix = match *operation { transform::ComputedOperation::Rotate(ax, ay, az, theta) => { let theta = 2.0f32 * f32::consts::PI - theta.radians(); Matrix4::create_rotation(ax, ay, az, theta) } transform::ComputedOperation::Perspective(d) => { Matrix4::create_perspective(d.to_f32_px()) } transform::ComputedOperation::Scale(sx, sy, sz) => { Matrix4::create_scale(sx, sy, sz) } transform::ComputedOperation::Translate(tx, ty, tz) => { let tx = model::specified(tx, border_box.size.width).to_f32_px(); let ty = model::specified(ty, border_box.size.height).to_f32_px(); let tz = tz.to_f32_px(); Matrix4::create_translation(tx, ty, tz) } transform::ComputedOperation::Matrix(m) => { m.to_gfx_matrix() } transform::ComputedOperation::Skew(sx, sy) => { Matrix4::create_skew(sx, sy) } }; transform = transform.mul(&matrix); } transform = pre_transform.mul(&transform).mul(&post_transform); } let perspective = match self.style().get_effects().perspective { LengthOrNone::Length(d) => { let perspective_origin = self.style().get_effects().perspective_origin; let perspective_origin = Point2D::new(model::specified(perspective_origin.horizontal, border_box.size.width).to_f32_px(), model::specified(perspective_origin.vertical, border_box.size.height).to_f32_px()); let pre_transform = Matrix4::create_translation(perspective_origin.x, perspective_origin.y, 0.0); let post_transform = Matrix4::create_translation(-perspective_origin.x, -perspective_origin.y, 0.0); let perspective_matrix = Matrix4::create_perspective(d.to_f32_px()); pre_transform.mul(&perspective_matrix).mul(&post_transform) } LengthOrNone::None => { Matrix4::identity() } }; // Create the filter pipeline. 
let effects = self.style().get_effects(); let mut filters = effects.filter.clone(); if effects.opacity != 1.0 { filters.push(Filter::Opacity(effects.opacity)) } // Ensure every canvas has a layer let (scroll_policy, layer_id) = match needs_layer { StackingContextLayerNecessity::Always(layer_id, scroll_policy) => (scroll_policy, Some(layer_id)), StackingContextLayerNecessity::IfCanvas(layer_id) => { if let SpecificFragmentInfo::Canvas(_) = self.specific { (ScrollPolicy::Scrollable, Some(layer_id)) } else { (ScrollPolicy::Scrollable, None) } } }; // If it's a canvas we must propagate the layer and the renderer to the paint // task if let SpecificFragmentInfo::Canvas(ref fragment_info) = self.specific { let layer_id = layer_id.unwrap(); if let Some(ref ipc_renderer) = fragment_info.ipc_renderer { layout_context.shared .canvas_layers_sender .send((layer_id, (*ipc_renderer.lock().unwrap()).clone())).unwrap(); } } let scrolls_overflow_area = mode == StackingContextCreationMode::OuterScrollWrapper; let transform_style = self.style().get_used_transform_style(); let establishes_3d_context = scrolls_overflow_area || transform_style == transform_style::T::flat; Arc::new(StackingContext::new(display_list, &border_box, &overflow, self.style().get_box().z_index.number_or_zero(), filters, self.style().get_effects().mix_blend_mode, transform, perspective, establishes_3d_context, scrolls_overflow_area, scroll_policy, layer_id)) } #[inline(never)] fn finalize_position_and_size_of_iframe(&self, iframe_fragment: &IframeFragmentInfo, offset: Point2D<Au>, layout_context: &LayoutContext) { let border_padding = (self.border_padding).to_physical(self.style.writing_mode); let content_size = self.content_box().size.to_physical(self.style.writing_mode); let iframe_rect = Rect::new(Point2D::new((offset.x + border_padding.left).to_f32_px(), (offset.y + border_padding.top).to_f32_px()), Size2D::new(content_size.width.to_f32_px(), content_size.height.to_f32_px())); debug!("finalizing position and size of iframe for {:?},{:?}", iframe_fragment.pipeline_id, iframe_fragment.subpage_id); let ConstellationChan(ref chan) = layout_context.shared.constellation_chan; chan.send(ConstellationMsg::FrameRect(iframe_fragment.pipeline_id, iframe_fragment.subpage_id, iframe_rect)).unwrap(); } fn clipping_region_for_children(&self, current_clip: &ClippingRegion, stacking_relative_border_box: &Rect<Au>, is_absolutely_positioned: bool) -> ClippingRegion { // Don't clip if we're text. if self.is_scanned_text_fragment() { return (*current_clip).clone() } // Account for style-specified `clip`. let mut current_clip = self.calculate_style_specified_clip(current_clip, stacking_relative_border_box); // Clip according to the values of `overflow-x` and `overflow-y`. // // TODO(pcwalton): Support scrolling of non-absolutely-positioned elements. // FIXME(pcwalton): This may be more complex than it needs to be, since it seems to be // impossible with the computed value rules as they are to have `overflow-x: visible` with // `overflow-y: <scrolling>` or vice versa! 
        match (self.style.get_box().overflow_x, is_absolutely_positioned) {
            (overflow_x::T::hidden, _) |
            (overflow_x::T::auto, false) |
            (overflow_x::T::scroll, false) => {
                let mut bounds = current_clip.bounding_rect();
                let max_x = cmp::min(bounds.max_x(), stacking_relative_border_box.max_x());
                bounds.origin.x = cmp::max(bounds.origin.x,
                                           stacking_relative_border_box.origin.x);
                bounds.size.width = max_x - bounds.origin.x;
                current_clip = current_clip.intersect_rect(&bounds)
            }
            _ => {}
        }
        match (self.style.get_box().overflow_y.0, is_absolutely_positioned) {
            (overflow_x::T::hidden, _) |
            (overflow_x::T::auto, false) |
            (overflow_x::T::scroll, false) => {
                let mut bounds = current_clip.bounding_rect();
                let max_y = cmp::min(bounds.max_y(), stacking_relative_border_box.max_y());
                bounds.origin.y = cmp::max(bounds.origin.y,
                                           stacking_relative_border_box.origin.y);
                bounds.size.height = max_y - bounds.origin.y;
                current_clip = current_clip.intersect_rect(&bounds)
            }
            _ => {}
        }

        current_clip
    }

    fn build_display_list_for_text_fragment(&self,
                                            display_list: &mut DisplayList,
                                            text_fragment: &ScannedTextFragmentInfo,
                                            text_color: RGBA,
                                            stacking_relative_content_box: &Rect<Au>,
                                            shadow_blur_radius: Option<Au>,
                                            offset: &Point2D<Au>,
                                            clip: &ClippingRegion) {
        // Determine the orientation and cursor to use.
        let (orientation, cursor) = if self.style.writing_mode.is_vertical() {
            if self.style.writing_mode.is_sideways_left() {
                (TextOrientation::SidewaysLeft, Cursor::VerticalTextCursor)
            } else {
                (TextOrientation::SidewaysRight, Cursor::VerticalTextCursor)
            }
        } else {
            (TextOrientation::Upright, Cursor::TextCursor)
        };

        // Compute location of the baseline.
        //
        // FIXME(pcwalton): Get the real container size.
        let container_size = Size2D::zero();
        let metrics = &text_fragment.run.font_metrics;
        let stacking_relative_content_box = stacking_relative_content_box.translate(offset);
        let baseline_origin = stacking_relative_content_box.origin +
            LogicalPoint::new(self.style.writing_mode,
                              Au(0),
                              metrics.ascent).to_physical(self.style.writing_mode,
                                                          container_size);

        // Create the text display item.
        display_list.content.push_back(DisplayItem::TextClass(box TextDisplayItem {
            base: BaseDisplayItem::new(stacking_relative_content_box,
                                       DisplayItemMetadata::new(self.node, self.style(), cursor),
                                       (*clip).clone()),
            text_run: text_fragment.run.clone(),
            range: text_fragment.range,
            text_color: text_color.to_gfx_color(),
            orientation: orientation,
            baseline_origin: baseline_origin,
            blur_radius: shadow_blur_radius.unwrap_or(Au(0)),
        }));

        // Create display items for text decorations.
        let mut text_decorations = self.style()
                                       .get_inheritedtext()
                                       ._servo_text_decorations_in_effect;
        if shadow_blur_radius.is_some() {
            // If we're painting a shadow, paint the decorations the same color as the shadow.
text_decorations.underline = text_decorations.underline.map(|_| text_color); text_decorations.overline = text_decorations.overline.map(|_| text_color); text_decorations.line_through = text_decorations.line_through.map(|_| text_color); } let stacking_relative_content_box = LogicalRect::from_physical(self.style.writing_mode, stacking_relative_content_box, container_size); if let Some(ref underline_color) = text_decorations.underline { let mut stacking_relative_box = stacking_relative_content_box; stacking_relative_box.start.b = stacking_relative_content_box.start.b + metrics.ascent - metrics.underline_offset; stacking_relative_box.size.block = metrics.underline_size; self.build_display_list_for_text_decoration(display_list, underline_color, &stacking_relative_box, clip, shadow_blur_radius.unwrap_or(Au(0))) } if let Some(ref overline_color) = text_decorations.overline { let mut stacking_relative_box = stacking_relative_content_box; stacking_relative_box.size.block = metrics.underline_size; self.build_display_list_for_text_decoration(display_list, overline_color, &stacking_relative_box, clip, shadow_blur_radius.unwrap_or(Au(0))) } if let Some(ref line_through_color) = text_decorations.line_through { let mut stacking_relative_box = stacking_relative_content_box; stacking_relative_box.start.b = stacking_relative_box.start.b + metrics.ascent - metrics.strikeout_offset; stacking_relative_box.size.block = metrics.strikeout_size; self.build_display_list_for_text_decoration(display_list, line_through_color, &stacking_relative_box, clip, shadow_blur_radius.unwrap_or(Au(0))) } } fn build_display_list_for_text_decoration(&self, display_list: &mut DisplayList, color: &RGBA, stacking_relative_box: &LogicalRect<Au>, clip: &ClippingRegion, blur_radius: Au) { // Perhaps surprisingly, text decorations are box shadows. This is because they may need // to have blur in the case of `text-shadow`, and this doesn't hurt performance because box // shadows are optimized into essentially solid colors if there is no need for the blur. // // FIXME(pcwalton, #2795): Get the real container size. 
let container_size = Size2D::zero(); let stacking_relative_box = stacking_relative_box.to_physical(self.style.writing_mode, container_size); let metadata = DisplayItemMetadata::new(self.node, &*self.style, Cursor::DefaultCursor); display_list.content.push_back(DisplayItem::BoxShadowClass(box BoxShadowDisplayItem { base: BaseDisplayItem::new(shadow_bounds(&stacking_relative_box, blur_radius, Au(0)), metadata, (*clip).clone()), box_bounds: stacking_relative_box, color: color.to_gfx_color(), offset: ZERO_POINT, blur_radius: blur_radius, spread_radius: Au(0), clip_mode: BoxShadowClipMode::None, })) } } pub trait BlockFlowDisplayListBuilding { fn build_display_list_for_block_base(&mut self, display_list: &mut DisplayList, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode, background_border_level: BackgroundAndBorderLevel); fn build_display_list_for_static_block(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode, background_border_level: BackgroundAndBorderLevel); fn build_display_list_for_absolutely_positioned_block( &mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode); fn build_display_list_for_floating_block(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode); fn build_display_list_for_block(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode); } impl BlockFlowDisplayListBuilding for BlockFlow { fn build_display_list_for_block_base(&mut self, display_list: &mut DisplayList, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode, background_border_level: BackgroundAndBorderLevel) { // Add the box that starts the block context. let clip = if self.fragment.establishes_stacking_context() { self.base.clip.translate(&-self.base.stacking_relative_position) } else { self.base.clip.clone() }; self.fragment .build_display_list(display_list, layout_context, &self.base.stacking_relative_position, &self.base .early_absolute_position_info .relative_containing_block_size, self.base .early_absolute_position_info .relative_containing_block_mode, border_painting_mode, background_border_level, &clip, &self.base.stacking_relative_position_of_display_port); // Add children. 
for kid in self.base.children.iter_mut() { flow::mut_base(kid).display_list_building_result.add_to(display_list); } self.base.build_display_items_for_debugging_tint(display_list, self.fragment.node); } fn build_display_list_for_static_block(&mut self, mut display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode, background_border_level: BackgroundAndBorderLevel) { self.build_display_list_for_block_base(&mut *display_list, layout_context, border_painting_mode, background_border_level); self.base.display_list_building_result = if self.fragment.flags.contains(HAS_LAYER) { let scroll_policy = if self.is_fixed() { ScrollPolicy::FixedPosition } else { ScrollPolicy::Scrollable }; let stacking_context = self.fragment.create_stacking_context( &self.base, display_list, layout_context, StackingContextLayerNecessity::Always(self.layer_id(0), scroll_policy), StackingContextCreationMode::Normal); DisplayListBuildingResult::StackingContext(stacking_context) } else if self.fragment.establishes_stacking_context() { DisplayListBuildingResult::StackingContext( self.fragment.create_stacking_context( &self.base, display_list, layout_context, StackingContextLayerNecessity::IfCanvas(self.layer_id(0)), StackingContextCreationMode::Normal)) } else { match self.fragment.style.get_box().position { position::T::static_ => {} _ => { display_list.form_pseudo_stacking_context_for_positioned_content(); } } DisplayListBuildingResult::Normal(display_list) } } fn build_display_list_for_absolutely_positioned_block( &mut self, mut display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode) { // If `overflow: scroll` is in effect, we add this fragment's display items to a new // stacking context. let outer_display_list_for_overflow_scroll = match (self.fragment.style().get_box().overflow_x, self.fragment.style().get_box().overflow_y.0) { (overflow_x::T::auto, _) | (overflow_x::T::scroll, _) | (_, overflow_x::T::auto) | (_, overflow_x::T::scroll) => { // Create a separate display list for our own fragment. let mut outer_display_list_for_overflow_scroll = box DisplayList::new(); let clip = self.base.clip.translate(&-self.base.stacking_relative_position); self.fragment.build_display_list( &mut outer_display_list_for_overflow_scroll, layout_context, &self.base.stacking_relative_position, &self.base.early_absolute_position_info.relative_containing_block_size, self.base.early_absolute_position_info.relative_containing_block_mode, border_painting_mode, BackgroundAndBorderLevel::RootOfStackingContext, &clip, &self.base.stacking_relative_position_of_display_port); // Add the fragments of our children to the display list we'll use for the inner // stacking context. 
for kid in self.base.children.iter_mut() { flow::mut_base(kid).display_list_building_result.add_to(&mut *display_list); } Some(outer_display_list_for_overflow_scroll) } _ => { let establishes_stacking_context = self.fragment.establishes_stacking_context(); let background_and_border_level = if establishes_stacking_context { BackgroundAndBorderLevel::RootOfStackingContext } else { BackgroundAndBorderLevel::Block }; self.build_display_list_for_block_base(&mut *display_list, layout_context, border_painting_mode, background_and_border_level); None } }; if !self.fragment.flags.contains(HAS_LAYER) { if !self.fragment.establishes_stacking_context() { display_list.form_pseudo_stacking_context_for_positioned_content(); self.base.display_list_building_result = DisplayListBuildingResult::Normal(display_list); } else { self.base.display_list_building_result = DisplayListBuildingResult::StackingContext( self.fragment.create_stacking_context( &self.base, display_list, layout_context, StackingContextLayerNecessity::IfCanvas(self.layer_id(0)), StackingContextCreationMode::Normal)); } return } // If we got here, then we need a new layer. let scroll_policy = if self.is_fixed() { ScrollPolicy::FixedPosition } else { ScrollPolicy::Scrollable }; let stacking_context_creation_mode = if outer_display_list_for_overflow_scroll.is_some() { StackingContextCreationMode::InnerScrollWrapper } else { StackingContextCreationMode::Normal }; let layer_id = if outer_display_list_for_overflow_scroll.is_some() { self.layer_id(FAKE_FRAGMENT_ID_FOR_OVERFLOW_SCROLL) } else { self.layer_id(0) }; let stacking_context = self.fragment.create_stacking_context( &self.base, display_list, layout_context, StackingContextLayerNecessity::Always(layer_id, scroll_policy), stacking_context_creation_mode); let outermost_stacking_context = match outer_display_list_for_overflow_scroll { Some(mut outer_display_list_for_overflow_scroll) => { outer_display_list_for_overflow_scroll.children.push_back(stacking_context); self.fragment.create_stacking_context( &self.base, outer_display_list_for_overflow_scroll, layout_context, StackingContextLayerNecessity::Always(self.layer_id(0), scroll_policy), StackingContextCreationMode::OuterScrollWrapper) } None => stacking_context, }; self.base.display_list_building_result = DisplayListBuildingResult::StackingContext(outermost_stacking_context) } fn build_display_list_for_floating_block(&mut self, mut display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode) { self.build_display_list_for_block_base(&mut *display_list, layout_context, border_painting_mode, BackgroundAndBorderLevel::RootOfStackingContext); display_list.form_float_pseudo_stacking_context(); self.base.display_list_building_result = if self.fragment.establishes_stacking_context() { DisplayListBuildingResult::StackingContext( self.fragment.create_stacking_context( &self.base, display_list, layout_context, StackingContextLayerNecessity::IfCanvas(self.layer_id(0)), StackingContextCreationMode::Normal)) } else { DisplayListBuildingResult::Normal(display_list) } } fn build_display_list_for_block(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext, border_painting_mode: BorderPaintingMode) { if self.base.flags.is_float() { // TODO(#2009, pcwalton): This is a pseudo-stacking context. We need to merge `z-index: // auto` kids into the parent stacking context, when that is supported. 
self.build_display_list_for_floating_block(display_list, layout_context, border_painting_mode); } else if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { self.build_display_list_for_absolutely_positioned_block(display_list, layout_context, border_painting_mode); } else { self.build_display_list_for_static_block(display_list, layout_context, border_painting_mode, BackgroundAndBorderLevel::Block); } } } pub trait InlineFlowDisplayListBuilding { fn build_display_list_for_inline(&mut self, layout_context: &LayoutContext); } impl InlineFlowDisplayListBuilding for InlineFlow { fn build_display_list_for_inline(&mut self, layout_context: &LayoutContext) { // TODO(#228): Once we form lines and have their cached bounds, we can be smarter and // not recurse on a line if nothing in it can intersect the dirty region. debug!("Flow: building display list for {} inline fragments", self.fragments.len()); let mut display_list = box DisplayList::new(); let mut has_stacking_context = false; for fragment in &mut self.fragments.fragments { fragment.build_display_list(&mut *display_list, layout_context, &self.base.stacking_relative_position, &self.base .early_absolute_position_info .relative_containing_block_size, self.base .early_absolute_position_info .relative_containing_block_mode, BorderPaintingMode::Separate, BackgroundAndBorderLevel::Content, &self.base.clip, &self.base.stacking_relative_position_of_display_port); has_stacking_context = fragment.establishes_stacking_context(); match fragment.specific { SpecificFragmentInfo::InlineBlock(ref mut block_flow) => { let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref); flow::mut_base(block_flow).display_list_building_result .add_to(&mut *display_list) } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut block_flow) => { let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref); flow::mut_base(block_flow).display_list_building_result .add_to(&mut *display_list) } SpecificFragmentInfo::InlineAbsolute(ref mut block_flow) => { let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref); flow::mut_base(block_flow).display_list_building_result .add_to(&mut *display_list) } _ => {} } } if !self.fragments.fragments.is_empty() { self.base.build_display_items_for_debugging_tint(&mut *display_list, self.fragments.fragments[0].node); } // FIXME(Savago): fix Fragment::establishes_stacking_context() for absolute positioned item // and remove the check for filter presence. Further details on #5812. has_stacking_context = has_stacking_context && { if let SpecificFragmentInfo::Canvas(_) = self.fragments.fragments[0].specific { true } else { !self.fragments.fragments[0].style().get_effects().filter.is_empty() } }; self.base.display_list_building_result = if has_stacking_context { DisplayListBuildingResult::StackingContext( self.fragments.fragments[0].create_stacking_context( &self.base, display_list, layout_context, StackingContextLayerNecessity::IfCanvas(self.layer_id(0)), StackingContextCreationMode::Normal)) } else { DisplayListBuildingResult::Normal(display_list) }; if opts::get().validate_display_list_geometry { self.base.validate_display_list_geometry(); } } } pub trait ListItemFlowDisplayListBuilding { fn build_display_list_for_list_item(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext); } impl ListItemFlowDisplayListBuilding for ListItemFlow { fn build_display_list_for_list_item(&mut self, mut display_list: Box<DisplayList>, layout_context: &LayoutContext) { // Draw the marker, if applicable. 
for marker in &mut self.marker_fragments { marker.build_display_list(&mut *display_list, layout_context, &self.block_flow.base.stacking_relative_position, &self.block_flow .base .early_absolute_position_info .relative_containing_block_size, self.block_flow .base .early_absolute_position_info .relative_containing_block_mode, BorderPaintingMode::Separate, BackgroundAndBorderLevel::Content, &self.block_flow.base.clip, &self.block_flow .base .stacking_relative_position_of_display_port); } // Draw the rest of the block. self.block_flow.build_display_list_for_block(display_list, layout_context, BorderPaintingMode::Separate) } } pub trait FlexFlowDisplayListBuilding { fn build_display_list_for_flex(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext); } impl FlexFlowDisplayListBuilding for FlexFlow { fn build_display_list_for_flex(&mut self, display_list: Box<DisplayList>, layout_context: &LayoutContext) { // Draw the rest of the block. self.as_mut_block().build_display_list_for_block(display_list, layout_context, BorderPaintingMode::Separate) } } trait BaseFlowDisplayListBuilding { fn build_display_items_for_debugging_tint(&self, display_list: &mut DisplayList, node: OpaqueNode); } impl BaseFlowDisplayListBuilding for BaseFlow { fn build_display_items_for_debugging_tint(&self, display_list: &mut DisplayList, node: OpaqueNode) { if !opts::get().show_debug_parallel_layout { return } let thread_id = self.thread_id; let stacking_context_relative_bounds = Rect::new(self.stacking_relative_position, self.position.size.to_physical(self.writing_mode)); let mut color = THREAD_TINT_COLORS[thread_id as usize % THREAD_TINT_COLORS.len()]; color.a = 1.0; display_list.push(DisplayItem::BorderClass(box BorderDisplayItem { base: BaseDisplayItem::new(stacking_context_relative_bounds.inflate(Au::from_px(2), Au::from_px(2)), DisplayItemMetadata { node: node, pointing: None, }, self.clip.clone()), border_widths: SideOffsets2D::new_all_same(Au::from_px(2)), color: SideOffsets2D::new_all_same(color), style: SideOffsets2D::new_all_same(border_style::T::solid), radius: BorderRadii::all_same(Au(0)), }), StackingLevel::Content); } } // A helper data structure for gradients. #[derive(Copy, Clone)] struct StopRun { start_offset: f32, end_offset: f32, start_index: usize, stop_count: usize, } fn fmin(a: f32, b: f32) -> f32 { if a < b { a } else { b } } fn position_to_offset(position: LengthOrPercentage, Au(total_length): Au) -> f32 { match position { LengthOrPercentage::Length(Au(length)) => { fmin(1.0, (length as f32) / (total_length as f32)) } LengthOrPercentage::Percentage(percentage) => percentage as f32, LengthOrPercentage::Calc(calc) => fmin(1.0, calc.percentage() + (calc.length().0 as f32) / (total_length as f32)), } } /// "Steps" as defined by CSS 2.1 § E.2. #[derive(Clone, PartialEq, Debug, Copy)] pub enum StackingLevel { /// The border and backgrounds for the root of this stacking context: steps 1 and 2. BackgroundAndBorders, /// Borders and backgrounds for block-level descendants: step 4. BlockBackgroundsAndBorders, /// All non-positioned content. Content, } impl StackingLevel { #[inline] pub fn from_background_and_border_level(level: BackgroundAndBorderLevel) -> StackingLevel { match level { BackgroundAndBorderLevel::RootOfStackingContext => StackingLevel::BackgroundAndBorders, BackgroundAndBorderLevel::Block => StackingLevel::BlockBackgroundsAndBorders, BackgroundAndBorderLevel::Content => StackingLevel::Content, } } } /// Which level to place backgrounds and borders in. 
pub enum BackgroundAndBorderLevel { RootOfStackingContext, Block, Content, } trait StackingContextConstruction { /// Adds the given display item at the specified level to this display list. fn push(&mut self, display_item: DisplayItem, level: StackingLevel); } impl StackingContextConstruction for DisplayList { fn push(&mut self, display_item: DisplayItem, level: StackingLevel) { match level { StackingLevel::BackgroundAndBorders => { self.background_and_borders.push_back(display_item) } StackingLevel::BlockBackgroundsAndBorders => { self.block_backgrounds_and_borders.push_back(display_item) } StackingLevel::Content => self.content.push_back(display_item), } } } /// Adjusts `content_rect` as necessary for the given spread, and blur so that the resulting /// bounding rect contains all of a shadow's ink. fn shadow_bounds(content_rect: &Rect<Au>, blur_radius: Au, spread_radius: Au) -> Rect<Au> { let inflation = spread_radius + blur_radius * BLUR_INFLATION_FACTOR; content_rect.inflate(inflation, inflation) } /// Allows a CSS color to be converted into a graphics color. pub trait ToGfxColor { /// Converts a CSS color to a graphics color. fn to_gfx_color(&self) -> Color; } impl ToGfxColor for RGBA { fn to_gfx_color(&self) -> Color { color::rgba(self.red, self.green, self.blue, self.alpha) } } /// Describes how to paint the borders. #[derive(Copy, Clone)] pub enum BorderPaintingMode<'a> { /// Paint borders separately (`border-collapse: separate`). Separate, /// Paint collapsed borders. Collapse(&'a CollapsedBordersForCell), /// Paint no borders. Hidden, } #[derive(Copy, Clone, PartialEq)] pub enum StackingContextCreationMode { Normal, OuterScrollWrapper, InnerScrollWrapper, }<|fim▁end|>
let container_size = Size2D::zero(); let metrics = &text_fragment.run.font_metrics;
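// The `position_to_offset` helper in the display_list_builder.rs prompt above
// maps a CSS `<length-or-percentage>` gradient stop position onto the
// normalized [0, 1] gradient axis, clamping lengths that overrun the axis.
// A minimal self-contained sketch of the same arithmetic, assuming a
// simplified `StopPosition` type in place of Servo's style types (the type
// and the `main` harness below are illustrative only):

#[derive(Clone, Copy)]
enum StopPosition {
    /// An absolute position along the gradient axis, in app units.
    Length(i32),
    /// A fraction of the gradient axis length.
    Percentage(f32),
}

fn position_to_offset(position: StopPosition, total_length: i32) -> f32 {
    match position {
        // Lengths are normalized against the axis length and clamped to 1.0,
        // mirroring the `fmin` clamp in the prompt.
        StopPosition::Length(length) => (length as f32 / total_length as f32).min(1.0),
        // Percentages are already fractions of the axis.
        StopPosition::Percentage(percentage) => percentage,
    }
}

fn main() {
    // A stop halfway along a 200-unit axis, and a 75% stop.
    assert_eq!(position_to_offset(StopPosition::Length(100), 200), 0.5);
    assert_eq!(position_to_offset(StopPosition::Percentage(0.75), 200), 0.75);
    // A length past the end of the axis clamps to 1.0.
    assert_eq!(position_to_offset(StopPosition::Length(400), 200), 1.0);
}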
<|file_name|>account.py<|end_file_name|><|fim▁begin|># Copyright 2018 Silvio Gregorini ([email protected]) # Copyright (c) 2018 Openforce Srls Unipersonale (www.openforce.it)<|fim▁hole|># License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). from odoo import models class AccountMoveLine(models.Model): _inherit = "account.move.line" def group_by_account_and_tax(self): grouped_lines = {} for line in self: group_key = (line.account_id, line.tax_line_id) if group_key not in grouped_lines: grouped_lines.update({group_key: []}) grouped_lines[group_key].append(line) return grouped_lines<|fim▁end|>
# Copyright (c) 2019 Matteo Bilotta
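# The account.py row above groups account move lines by their
# (account, tax) pair. A minimal sketch of the same accumulation pattern on
# plain tuples, assuming (account_id, tax_line_id, amount) records instead of
# Odoo recordsets (the sample data is illustrative only):

def group_by_account_and_tax(lines):
    grouped = {}
    for account_id, tax_line_id, amount in lines:
        key = (account_id, tax_line_id)
        # setdefault plays the role of the `if group_key not in ...` guard.
        grouped.setdefault(key, []).append(amount)
    return grouped

lines = [(1, None, 100.0), (1, None, 50.0), (2, 7, 25.0)]
assert group_by_account_and_tax(lines) == {(1, None): [100.0, 50.0], (2, 7): [25.0]}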
<|file_name|>mesh.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright (c) 2015, Vispy Development Team. All Rights Reserved. # Distributed under the (new) BSD License. See LICENSE.txt for more info. # ----------------------------------------------------------------------------- """ A MeshVisual Visual that uses the new shader Function. """ from __future__ import division import numpy as np from .visual import Visual from .shaders import Function, Varying from ..gloo import VertexBuffer, IndexBuffer from ..geometry import MeshData from ..color import Color # Shaders for lit rendering (using phong shading) shading_vertex_template = """ varying vec3 v_normal_vec; varying vec3 v_light_vec; varying vec3 v_eye_vec; <|fim▁hole|>varying vec4 v_base_color; void main() { v_ambientk = $ambientk; v_light_color = $light_color; v_base_color = $base_color; vec4 pos_scene = $visual2scene($to_vec4($position)); vec4 normal_scene = $visual2scene(vec4($normal, 1)); vec4 origin_scene = $visual2scene(vec4(0, 0, 0, 1)); normal_scene /= normal_scene.w; origin_scene /= origin_scene.w; vec3 normal = normalize(normal_scene.xyz - origin_scene.xyz); v_normal_vec = normal; //VARYING COPY vec4 pos_front = $scene2doc(pos_scene); pos_front.z += 0.01; pos_front = $doc2scene(pos_front); pos_front /= pos_front.w; vec4 pos_back = $scene2doc(pos_scene); pos_back.z -= 0.01; pos_back = $doc2scene(pos_back); pos_back /= pos_back.w; vec3 eye = normalize(pos_front.xyz - pos_back.xyz); v_eye_vec = eye; //VARYING COPY vec3 light = normalize($light_dir.xyz); v_light_vec = light; //VARYING COPY gl_Position = $transform($to_vec4($position)); } """ shading_fragment_template = """ varying vec3 v_normal_vec; varying vec3 v_light_vec; varying vec3 v_eye_vec; varying vec4 v_ambientk; varying vec4 v_light_color; varying vec4 v_base_color; void main() { //DIFFUSE float diffusek = dot(v_light_vec, v_normal_vec); //clamp, because 0 < theta < pi/2 diffusek = clamp(diffusek, 0, 1); vec4 diffuse_color = v_light_color * diffusek; //diffuse_color.a = 1.0; //SPECULAR //reflect light wrt normal for the reflected ray, then //find the angle made with the eye float speculark = dot(reflect(v_light_vec, v_normal_vec), v_eye_vec); speculark = clamp(speculark, 0, 1); //raise to the material's shininess, multiply with a //small factor for spread speculark = 20 * pow(speculark, 200.0); vec4 specular_color = v_light_color * speculark; gl_FragColor = v_base_color * (v_ambientk + diffuse_color) + specular_color; //gl_FragColor = vec4(speculark, 0, 1, 1.0); } """ # Shader code for non lighted rendering vertex_template = """ void main() { gl_Position = $transform($to_vec4($position)); } """ fragment_template = """ void main() { gl_FragColor = $color; } """ # Functions that can be used as is (don't have template variables) # Consider these stored in a central location in vispy ... vec3to4 = Function(""" vec4 vec3to4(vec3 xyz) { return vec4(xyz, 1.0); } """) vec2to4 = Function(""" vec4 vec2to4(vec2 xyz) { return vec4(xyz, 0.0, 1.0); } """) class MeshVisual(Visual): """Mesh visual Parameters ---------- vertices : array-like | None The vertices. faces : array-like | None The faces. vertex_colors : array-like | None Colors to use for each vertex. face_colors : array-like | None Colors to use for each face. color : instance of Color The color to use. meshdata : instance of MeshData | None The meshdata. shading : str | None Shading to use. mode : str The drawing mode. 
**kwargs : dict Keyword arguments to pass to `Visual`. """ def __init__(self, vertices=None, faces=None, vertex_colors=None, face_colors=None, color=(0.5, 0.5, 1, 1), meshdata=None, shading=None, mode='triangles', **kwargs): # Function for computing phong shading # self._phong = Function(phong_template) # Visual.__init__ -> prepare_transforms() -> uses shading self.shading = shading if shading is not None: Visual.__init__(self, vcode=shading_vertex_template, fcode=shading_fragment_template, **kwargs) else: Visual.__init__(self, vcode=vertex_template, fcode=fragment_template, **kwargs) self.set_gl_state('translucent', depth_test=True, cull_face=False) # Define buffers self._vertices = VertexBuffer(np.zeros((0, 3), dtype=np.float32)) self._normals = None self._faces = IndexBuffer() self._colors = VertexBuffer(np.zeros((0, 4), dtype=np.float32)) self._normals = VertexBuffer(np.zeros((0, 3), dtype=np.float32)) # Uniform color self._color = Color(color) # varyings self._color_var = Varying('v_color', dtype='vec4') # Init self._bounds = None # Note we do not call subclass set_data -- often the signatures # do not match. MeshVisual.set_data(self, vertices=vertices, faces=faces, vertex_colors=vertex_colors, face_colors=face_colors, meshdata=meshdata, color=color) # primitive mode self._draw_mode = mode self.freeze() def set_data(self, vertices=None, faces=None, vertex_colors=None, face_colors=None, color=None, meshdata=None): """Set the mesh data Parameters ---------- vertices : array-like | None The vertices. faces : array-like | None The faces. vertex_colors : array-like | None Colors to use for each vertex. face_colors : array-like | None Colors to use for each face. color : instance of Color The color to use. meshdata : instance of MeshData | None The meshdata. """ if meshdata is not None: self._meshdata = meshdata else: self._meshdata = MeshData(vertices=vertices, faces=faces, vertex_colors=vertex_colors, face_colors=face_colors) self._bounds = self._meshdata.get_bounds() if color is not None: self._color = Color(color) self.mesh_data_changed() @property def mode(self): """The triangle mode used to draw this mesh. Options are: * 'triangles': Draw one triangle for every three vertices (eg, [1,2,3], [4,5,6], [7,8,9]) * 'triangle_strip': Draw one strip for every vertex excluding the first two (eg, [1,2,3], [2,3,4], [3,4,5]) * 'triangle_fan': Draw each triangle from the first vertex and the last two vertices (eg, [1,2,3], [1,3,4], [1,4,5]) """ return self._draw_mode @mode.setter def mode(self, m): modes = ['triangles', 'triangle_strip', 'triangle_fan'] if m not in modes: raise ValueError("Mesh mode must be one of %s" % ', '.join(modes)) self._draw_mode = m @property def mesh_data(self): """The mesh data""" return self._meshdata @property def color(self): """The uniform color for this mesh. This value is only used if per-vertex or per-face colors are not specified.
""" return self._color @color.setter def color(self, c): self.set_data(color=c) def mesh_data_changed(self): self._data_changed = True self.update() def _update_data(self): md = self.mesh_data # Update vertex/index buffers if self.shading == 'smooth' and not md.has_face_indexed_data(): v = md.get_vertices() if v is None: return False if v.shape[-1] == 2: v = np.concatenate((v, np.zeros((v.shape[:-1] + (1,)))), -1) self._vertices.set_data(v, convert=True) self._normals.set_data(md.get_vertex_normals(), convert=True) self._faces.set_data(md.get_faces(), convert=True) self._index_buffer = self._faces if md.has_vertex_color(): self._colors.set_data(md.get_vertex_colors(), convert=True) elif md.has_face_color(): self._colors.set_data(md.get_face_colors(), convert=True) else: self._colors.set_data(np.zeros((0, 4), dtype=np.float32)) else: v = md.get_vertices(indexed='faces') if v is None: return False if v.shape[-1] == 2: v = np.concatenate((v, np.zeros((v.shape[:-1] + (1,)))), -1) self._vertices.set_data(v, convert=True) if self.shading == 'smooth': normals = md.get_vertex_normals(indexed='faces') self._normals.set_data(normals, convert=True) elif self.shading == 'flat': normals = md.get_face_normals(indexed='faces') self._normals.set_data(normals, convert=True) else: self._normals.set_data(np.zeros((0, 3), dtype=np.float32)) self._index_buffer = None if md.has_vertex_color(): self._colors.set_data(md.get_vertex_colors(indexed='faces'), convert=True) elif md.has_face_color(): self._colors.set_data(md.get_face_colors(indexed='faces'), convert=True) else: self._colors.set_data(np.zeros((0, 4), dtype=np.float32)) self.shared_program.vert['position'] = self._vertices # Position input handling if v.shape[-1] == 2: self.shared_program.vert['to_vec4'] = vec2to4 elif v.shape[-1] == 3: self.shared_program.vert['to_vec4'] = vec3to4 else: raise TypeError("Vertex data must have shape (...,2) or (...,3).") # Color input handling # If non-lit shading is used, then just pass the colors # Otherwise, the shader uses a base_color to represent the underlying # color, which is then lit with the lighting model colors = self._colors if self._colors.size > 0 else self._color.rgba if self.shading is None: self.shared_program.vert[self._color_var] = colors # Shading if self.shading is None: self.shared_program.frag['color'] = self._color_var else: # Normal data comes via vertex shader if self._normals.size > 0: normals = self._normals else: normals = (1., 0., 0.) self.shared_program.vert['normal'] = normals self.shared_program.vert['base_color'] = colors # Additional phong properties self.shared_program.vert['light_dir'] = (10, 5, -5) self.shared_program.vert['light_color'] = (1.0, 1.0, 1.0, 1.0) self.shared_program.vert['ambientk'] = (0.3, 0.3, 0.3, 1.0) self._data_changed = False @property def shading(self): """ The shading method used. 
""" return self._shading @shading.setter def shading(self, value): assert value in (None, 'flat', 'smooth') self._shading = value def _prepare_draw(self, view): if self._data_changed: if self._update_data() is False: return False self._data_changed = False def draw(self, *args, **kwds): Visual.draw(self, *args, **kwds) @staticmethod def _prepare_transforms(view): tr = view.transforms.get_transform() view.view_program.vert['transform'] = tr # .simplified if view.shading is not None: visual2scene = view.transforms.get_transform('visual', 'scene') scene2doc = view.transforms.get_transform('scene', 'document') doc2scene = view.transforms.get_transform('document', 'scene') view.shared_program.vert['visual2scene'] = visual2scene view.shared_program.vert['scene2doc'] = scene2doc view.shared_program.vert['doc2scene'] = doc2scene def _compute_bounds(self, axis, view): if self._bounds is None: return None return self._bounds[axis]<|fim▁end|>
varying vec4 v_ambientk; varying vec4 v_light_color;
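# The mesh.py fragment shader above combines an ambient term, a Lambertian
# diffuse term clamp(dot(light, normal), 0, 1), and a Phong specular term
# 20 * pow(clamp(dot(reflect(light, normal), eye), 0, 1), 200), composed as
# base * (ambient + diffuse) + specular. A small NumPy sketch of the same
# shading math on the CPU (the vectors and constants below are illustrative,
# not taken from the visual):

import numpy as np

def phong_intensity(normal, light_dir, eye_dir, base=1.0, ambient=0.3):
    n = normal / np.linalg.norm(normal)
    light = light_dir / np.linalg.norm(light_dir)
    eye = eye_dir / np.linalg.norm(eye_dir)
    diffuse = np.clip(np.dot(light, n), 0.0, 1.0)
    # GLSL reflect(I, N) is I - 2 * dot(N, I) * N.
    reflected = light - 2.0 * np.dot(n, light) * n
    specular = 20.0 * np.clip(np.dot(reflected, eye), 0.0, 1.0) ** 200
    return base * (ambient + diffuse) + specular

# A surface facing the light head-on: full diffuse, no specular highlight
# (the reflected ray points away from the eye), so 1.0 * (0.3 + 1.0) = 1.3.
print(phong_intensity(np.array([0.0, 0.0, 1.0]),
                      np.array([0.0, 0.0, 1.0]),
                      np.array([0.0, 0.0, 1.0])))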
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright © 2018 Cormac O'Brien // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. // TODO: need to figure out an equivalence relation for read_/write_coord and read_/write_angle pub mod connect; use std::{ collections::VecDeque, error::Error, fmt, io::{BufRead, BufReader, Cursor, Read, Write}, net::{SocketAddr, UdpSocket}, }; use crate::common::{engine, util}; use byteorder::{LittleEndian, NetworkEndian, ReadBytesExt, WriteBytesExt}; use cgmath::{Deg, Vector3, Zero}; use chrono::Duration; use num::FromPrimitive; pub const MAX_MESSAGE: usize = 8192; const MAX_DATAGRAM: usize = 1024; const HEADER_SIZE: usize = 8; const MAX_PACKET: usize = HEADER_SIZE + MAX_DATAGRAM; pub const PROTOCOL_VERSION: u8 = 15; const NAME_LEN: usize = 64; const FAST_UPDATE_FLAG: u8 = 0x80; const VELOCITY_READ_FACTOR: f32 = 16.0; const VELOCITY_WRITE_FACTOR: f32 = 1.0 / VELOCITY_READ_FACTOR; const PARTICLE_DIRECTION_READ_FACTOR: f32 = 1.0 / 16.0; const PARTICLE_DIRECTION_WRITE_FACTOR: f32 = 1.0 / PARTICLE_DIRECTION_READ_FACTOR; const SOUND_ATTENUATION_WRITE_FACTOR: u8 = 64; const SOUND_ATTENUATION_READ_FACTOR: f32 = 1.0 / SOUND_ATTENUATION_WRITE_FACTOR as f32; pub static GAME_NAME: &'static str = "QUAKE"; pub const MAX_CLIENTS: usize = 16; pub const MAX_ITEMS: usize = 32; pub const DEFAULT_VIEWHEIGHT: f32 = 22.0; #[derive(Debug)] pub enum NetError { Io(::std::io::Error), InvalidData(String), Other(String), } impl NetError { pub fn with_msg<S>(msg: S) -> Self where S: AsRef<str>, { NetError::Other(msg.as_ref().to_owned()) } } impl fmt::Display for NetError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { NetError::Io(ref err) => { write!(f, "I/O error: ")?; err.fmt(f) } NetError::InvalidData(ref msg) => write!(f, "Invalid data: {}", msg), NetError::Other(ref msg) => write!(f, "{}", msg), } } } impl Error for NetError { fn description(&self) -> &str { match *self { NetError::Io(ref err) => err.description(), NetError::InvalidData(_) => "Invalid data", NetError::Other(ref msg) => &msg, } } } impl From<::std::io::Error> for NetError { fn from(error: ::std::io::Error) -> Self { NetError::Io(error) } } // the original engine treats these as bitflags, but all of them are mutually exclusive except for // NETFLAG_DATA (reliable message) and NETFLAG_EOM (end of reliable message). #[derive(Debug, Eq, FromPrimitive, PartialEq)] pub enum MsgKind { Reliable = 0x0001, Ack = 0x0002, ReliableEom = 0x0009, Unreliable = 0x0010, Ctl = 0x8000, } bitflags! 
{ pub struct UpdateFlags: u16 { const MORE_BITS = 1 << 0; const ORIGIN_X = 1 << 1; const ORIGIN_Y = 1 << 2; const ORIGIN_Z = 1 << 3; const YAW = 1 << 4; const NO_LERP = 1 << 5; const FRAME = 1 << 6; const SIGNAL = 1 << 7; const PITCH = 1 << 8; const ROLL = 1 << 9; const MODEL = 1 << 10; const COLORMAP = 1 << 11; const SKIN = 1 << 12; const EFFECTS = 1 << 13; const LONG_ENTITY = 1 << 14; } } bitflags! { pub struct ClientUpdateFlags: u16 { const VIEW_HEIGHT = 1 << 0; const IDEAL_PITCH = 1 << 1; const PUNCH_PITCH = 1 << 2; const PUNCH_YAW = 1 << 3; const PUNCH_ROLL = 1 << 4; const VELOCITY_X = 1 << 5; const VELOCITY_Y = 1 << 6; const VELOCITY_Z = 1 << 7; // const AIM_ENT = 1 << 8; // unused const ITEMS = 1 << 9; const ON_GROUND = 1 << 10; const IN_WATER = 1 << 11; const WEAPON_FRAME = 1 << 12; const ARMOR = 1 << 13; const WEAPON = 1 << 14; } } bitflags! { pub struct SoundFlags: u8 { const VOLUME = 1 << 0; const ATTENUATION = 1 << 1; const LOOPING = 1 << 2; } } bitflags! { pub struct ItemFlags: u32 { const SHOTGUN = 0x00000001; const SUPER_SHOTGUN = 0x00000002; const NAILGUN = 0x00000004; const SUPER_NAILGUN = 0x00000008; const GRENADE_LAUNCHER = 0x00000010; const ROCKET_LAUNCHER = 0x00000020; const LIGHTNING = 0x00000040; const SUPER_LIGHTNING = 0x00000080; const SHELLS = 0x00000100; const NAILS = 0x00000200; const ROCKETS = 0x00000400; const CELLS = 0x00000800; const AXE = 0x00001000; const ARMOR_1 = 0x00002000; const ARMOR_2 = 0x00004000; const ARMOR_3 = 0x00008000; const SUPER_HEALTH = 0x00010000; const KEY_1 = 0x00020000; const KEY_2 = 0x00040000; const INVISIBILITY = 0x00080000; const INVULNERABILITY = 0x00100000; const SUIT = 0x00200000; const QUAD = 0x00400000; const SIGIL_1 = 0x10000000; const SIGIL_2 = 0x20000000; const SIGIL_3 = 0x40000000; const SIGIL_4 = 0x80000000; } } bitflags! { pub struct ButtonFlags: u8 { const ATTACK = 0x01; const JUMP = 0x02; } } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct PlayerColor { top: u8, bottom: u8, } impl PlayerColor { pub fn new(top: u8, bottom: u8) -> PlayerColor { if top > 15 { warn!("Top color index ({}) will be truncated", top); } if bottom > 15 { warn!("Bottom color index ({}) will be truncated", bottom); } PlayerColor { top, bottom } } pub fn from_bits(bits: u8) -> PlayerColor { let top = bits >> 4; let bottom = bits & 0x0F; PlayerColor { top, bottom } } pub fn bits(&self) -> u8 { self.top << 4 | (self.bottom & 0x0F) } } impl ::std::convert::From<u8> for PlayerColor { fn from(src: u8) -> PlayerColor { PlayerColor { top: src >> 4, bottom: src & 0x0F, } } } #[derive(Clone, Copy, Debug)] pub struct ColorShift { pub dest_color: [u8; 3], pub percent: i32, } #[derive(Copy, Clone, Debug, Eq, FromPrimitive, PartialEq)] pub enum ClientStat { Health = 0, Frags = 1, Weapon = 2, Ammo = 3, Armor = 4, WeaponFrame = 5, Shells = 6, Nails = 7, Rockets = 8, Cells = 9, ActiveWeapon = 10, TotalSecrets = 11, TotalMonsters = 12, FoundSecrets = 13, KilledMonsters = 14, } /// Numeric codes used to identify the type of a temporary entity.
#[derive(Debug, Eq, FromPrimitive, PartialEq)] pub enum TempEntityCode { Spike = 0, SuperSpike = 1, Gunshot = 2, Explosion = 3, TarExplosion = 4, Lightning1 = 5, Lightning2 = 6, WizSpike = 7, KnightSpike = 8, Lightning3 = 9, LavaSplash = 10, Teleport = 11, ColorExplosion = 12, Grapple = 13, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum PointEntityKind { Spike, SuperSpike, Gunshot, Explosion, ColorExplosion { color_start: u8, color_len: u8 }, TarExplosion, WizSpike, KnightSpike, LavaSplash, Teleport, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum BeamEntityKind { /// Lightning bolt Lightning { /// id of the lightning model to use. must be 1, 2, or 3. model_id: u8, }, /// Grappling hook cable Grapple, } #[derive(Clone, Debug, PartialEq)] pub enum TempEntity { Point { kind: PointEntityKind, origin: Vector3<f32>, }, Beam { kind: BeamEntityKind, entity_id: i16, start: Vector3<f32>, end: Vector3<f32>, }, } impl TempEntity { pub fn read_temp_entity<R>(reader: &mut R) -> Result<TempEntity, NetError> where R: BufRead + ReadBytesExt, { let code_byte = reader.read_u8()?; let code = match TempEntityCode::from_u8(code_byte) { Some(c) => c, None => { return Err(NetError::InvalidData(format!( "Temp entity code {}", code_byte ))) } }; use TempEntity::*; use TempEntityCode as Code; Ok(match code { Code::Spike | Code::SuperSpike | Code::Gunshot | Code::Explosion | Code::TarExplosion | Code::WizSpike | Code::KnightSpike | Code::LavaSplash | Code::Teleport => Point { kind: match code { Code::Spike => PointEntityKind::Spike, Code::SuperSpike => PointEntityKind::SuperSpike, Code::Gunshot => PointEntityKind::Gunshot, Code::Explosion => PointEntityKind::Explosion, Code::TarExplosion => PointEntityKind::TarExplosion, Code::WizSpike => PointEntityKind::WizSpike, Code::KnightSpike => PointEntityKind::KnightSpike, Code::LavaSplash => PointEntityKind::LavaSplash, Code::Teleport => PointEntityKind::Teleport, _ => unreachable!(), }, origin: read_coord_vector3(reader)?, }, Code::ColorExplosion => { let origin = read_coord_vector3(reader)?; let color_start = reader.read_u8()?; let color_len = reader.read_u8()?; Point { origin, kind: PointEntityKind::ColorExplosion { color_start, color_len, }, } } Code::Lightning1 | Code::Lightning2 | Code::Lightning3 => Beam { kind: BeamEntityKind::Lightning { model_id: match code { Code::Lightning1 => 1, Code::Lightning2 => 2, Code::Lightning3 => 3, _ => unreachable!(), }, }, entity_id: reader.read_i16::<LittleEndian>()?, start: read_coord_vector3(reader)?, end: read_coord_vector3(reader)?, }, Code::Grapple => Beam { kind: BeamEntityKind::Grapple, entity_id: reader.read_i16::<LittleEndian>()?, start: read_coord_vector3(reader)?, end: read_coord_vector3(reader)?, }, }) } pub fn write_temp_entity<W>(&self, writer: &mut W) -> Result<(), NetError> where W: WriteBytesExt, { use TempEntityCode as Code; match *self { TempEntity::Point { kind, origin } => { use PointEntityKind as Pk; match kind { Pk::Spike | Pk::SuperSpike | Pk::Gunshot | Pk::Explosion | Pk::TarExplosion | Pk::WizSpike | Pk::KnightSpike<|fim▁hole|> | Pk::LavaSplash | Pk::Teleport => { let code = match kind { Pk::Spike => Code::Spike, Pk::SuperSpike => Code::SuperSpike, Pk::Gunshot => Code::Gunshot, Pk::Explosion => Code::Explosion, Pk::TarExplosion => Code::TarExplosion, Pk::WizSpike => Code::WizSpike, Pk::KnightSpike => Code::KnightSpike, Pk::LavaSplash => Code::LavaSplash, Pk::Teleport => Code::Teleport, _ => unreachable!(), }; // write code writer.write_u8(code as u8)?; } PointEntityKind::ColorExplosion { 
.. } => { // write only the code here; the origin precedes the color range on the wire writer.write_u8(Code::ColorExplosion as u8)?; } }; write_coord_vector3(writer, origin)?; // the color range follows the origin, matching read_temp_entity if let Pk::ColorExplosion { color_start, color_len } = kind { writer.write_u8(color_start)?; writer.write_u8(color_len)?; } } TempEntity::Beam { kind, entity_id, start, end, } => { let code = match kind { BeamEntityKind::Lightning { model_id } => match model_id { 1 => Code::Lightning1, 2 => Code::Lightning2, 3 => Code::Lightning3, // TODO: error _ => panic!("invalid lightning model id: {}", model_id), }, BeamEntityKind::Grapple => Code::Grapple, }; // the code byte precedes the entity id, matching read_temp_entity writer.write_u8(code as u8)?; writer.write_i16::<LittleEndian>(entity_id)?; write_coord_vector3(writer, start)?; write_coord_vector3(writer, end)?; } } Ok(()) } } #[derive(Copy, Clone, Ord, Debug, Eq, FromPrimitive, PartialOrd, PartialEq)] pub enum SignOnStage { Not = 0, Prespawn = 1, ClientInfo = 2, Begin = 3, Done = 4, } bitflags! { pub struct EntityEffects: u8 { const BRIGHT_FIELD = 0b0001; const MUZZLE_FLASH = 0b0010; const BRIGHT_LIGHT = 0b0100; const DIM_LIGHT = 0b1000; } } #[derive(Clone, Debug)] pub struct EntityState { pub origin: Vector3<f32>, pub angles: Vector3<Deg<f32>>, pub model_id: usize, pub frame_id: usize, // TODO: more specific types for these pub colormap: u8, pub skin_id: usize, pub effects: EntityEffects, } impl EntityState { pub fn uninitialized() -> EntityState { EntityState { origin: Vector3::new(0.0, 0.0, 0.0), angles: Vector3::new(Deg(0.0), Deg(0.0), Deg(0.0)), model_id: 0, frame_id: 0, colormap: 0, skin_id: 0, effects: EntityEffects::empty(), } } } #[derive(Clone, Debug, PartialEq)] pub struct EntityUpdate { pub ent_id: u16, pub model_id: Option<u8>, pub frame_id: Option<u8>, pub colormap: Option<u8>, pub skin_id: Option<u8>, pub effects: Option<EntityEffects>, pub origin_x: Option<f32>, pub pitch: Option<Deg<f32>>, pub origin_y: Option<f32>, pub yaw: Option<Deg<f32>>, pub origin_z: Option<f32>, pub roll: Option<Deg<f32>>, pub no_lerp: bool, } #[derive(Clone, Debug, PartialEq)] pub struct PlayerData { pub view_height: Option<f32>, pub ideal_pitch: Option<Deg<f32>>, pub punch_pitch: Option<Deg<f32>>, pub velocity_x: Option<f32>, pub punch_yaw: Option<Deg<f32>>, pub velocity_y: Option<f32>, pub punch_roll: Option<Deg<f32>>, pub velocity_z: Option<f32>, pub items: ItemFlags, pub on_ground: bool, pub in_water: bool, pub weapon_frame: Option<u8>, pub armor: Option<u8>, pub weapon: Option<u8>, pub health: i16, pub ammo: u8, pub ammo_shells: u8, pub ammo_nails: u8, pub ammo_rockets: u8, pub ammo_cells: u8, pub active_weapon: u8, } impl EntityUpdate { /// Create an `EntityState` from this update, filling in any `None` values /// from the specified baseline state. pub fn to_entity_state(&self, baseline: &EntityState) -> EntityState { EntityState { origin: Vector3::new( self.origin_x.unwrap_or(baseline.origin.x), self.origin_y.unwrap_or(baseline.origin.y), self.origin_z.unwrap_or(baseline.origin.z), ), angles: Vector3::new( self.pitch.unwrap_or(baseline.angles[0]), self.yaw.unwrap_or(baseline.angles[1]), self.roll.unwrap_or(baseline.angles[2]), ), model_id: self.model_id.map_or(baseline.model_id, |m| m as usize), frame_id: self.frame_id.map_or(baseline.frame_id, |f| f as usize), skin_id: self.skin_id.map_or(baseline.skin_id, |s| s as usize), effects: self.effects.unwrap_or(baseline.effects), colormap: self.colormap.unwrap_or(baseline.colormap), } } } /// A trait for in-game server and client network commands. pub trait Cmd: Sized { /// Returns the numeric value of this command's code.
fn code(&self) -> u8; /// Reads data from the given source and constructs a command object. fn deserialize<R>(reader: &mut R) -> Result<Self, NetError> where R: BufRead + ReadBytesExt; /// Writes this command's content to the given sink. fn serialize<W>(&self, writer: &mut W) -> Result<(), NetError> where W: WriteBytesExt; } // TODO: use feature(arbitrary_enum_discriminant) #[derive(Debug, FromPrimitive)] pub enum ServerCmdCode { Bad = 0, NoOp = 1, Disconnect = 2, UpdateStat = 3, Version = 4, SetView = 5, Sound = 6, Time = 7, Print = 8, StuffText = 9, SetAngle = 10, ServerInfo = 11, LightStyle = 12, UpdateName = 13, UpdateFrags = 14, PlayerData = 15, StopSound = 16, UpdateColors = 17, Particle = 18, Damage = 19, SpawnStatic = 20, // SpawnBinary = 21, // unused SpawnBaseline = 22, TempEntity = 23, SetPause = 24, SignOnStage = 25, CenterPrint = 26, KilledMonster = 27, FoundSecret = 28, SpawnStaticSound = 29, Intermission = 30, Finale = 31, CdTrack = 32, SellScreen = 33, Cutscene = 34, } #[derive(Copy, Clone, Debug, Eq, FromPrimitive, PartialEq)] pub enum GameType { CoOp = 0, Deathmatch = 1, } #[derive(Debug, PartialEq)] pub enum ServerCmd { Bad, NoOp, Disconnect, UpdateStat { stat: ClientStat, value: i32, }, Version { version: i32, }, SetView { ent_id: i16, }, Sound { volume: Option<u8>, attenuation: Option<f32>, entity_id: u16, channel: i8, sound_id: u8, position: Vector3<f32>, }, Time { time: f32, }, Print { text: String, }, StuffText { text: String, }, SetAngle { angles: Vector3<Deg<f32>>, }, ServerInfo { protocol_version: i32, max_clients: u8, game_type: GameType, message: String, model_precache: Vec<String>, sound_precache: Vec<String>, }, LightStyle { id: u8, value: String, }, UpdateName { player_id: u8, new_name: String, }, UpdateFrags { player_id: u8, new_frags: i16, }, PlayerData(PlayerData), StopSound { entity_id: u16, channel: u8, }, UpdateColors { player_id: u8, new_colors: PlayerColor, }, Particle { origin: Vector3<f32>, direction: Vector3<f32>, count: u8, color: u8, }, Damage { armor: u8, blood: u8, source: Vector3<f32>, }, SpawnStatic { model_id: u8, frame_id: u8, colormap: u8, skin_id: u8, origin: Vector3<f32>, angles: Vector3<Deg<f32>>, }, // SpawnBinary, // unused SpawnBaseline { ent_id: u16, model_id: u8, frame_id: u8, colormap: u8, skin_id: u8, origin: Vector3<f32>, angles: Vector3<Deg<f32>>, }, TempEntity { temp_entity: TempEntity, }, SetPause { paused: bool, }, SignOnStage { stage: SignOnStage, }, CenterPrint { text: String, }, KilledMonster, FoundSecret, SpawnStaticSound { origin: Vector3<f32>, sound_id: u8, volume: u8, attenuation: u8, }, Intermission, Finale { text: String, }, CdTrack { track: u8, loop_: u8, }, SellScreen, Cutscene { text: String, }, FastUpdate(EntityUpdate), } impl ServerCmd { pub fn code(&self) -> u8 { let code = match *self { ServerCmd::Bad => ServerCmdCode::Bad, ServerCmd::NoOp => ServerCmdCode::NoOp, ServerCmd::Disconnect => ServerCmdCode::Disconnect, ServerCmd::UpdateStat { .. } => ServerCmdCode::UpdateStat, ServerCmd::Version { .. } => ServerCmdCode::Version, ServerCmd::SetView { .. } => ServerCmdCode::SetView, ServerCmd::Sound { .. } => ServerCmdCode::Sound, ServerCmd::Time { .. } => ServerCmdCode::Time, ServerCmd::Print { .. } => ServerCmdCode::Print, ServerCmd::StuffText { .. } => ServerCmdCode::StuffText, ServerCmd::SetAngle { .. } => ServerCmdCode::SetAngle, ServerCmd::ServerInfo { .. } => ServerCmdCode::ServerInfo, ServerCmd::LightStyle { .. } => ServerCmdCode::LightStyle, ServerCmd::UpdateName { .. 
} => ServerCmdCode::UpdateName, ServerCmd::UpdateFrags { .. } => ServerCmdCode::UpdateFrags, ServerCmd::PlayerData(_) => ServerCmdCode::PlayerData, ServerCmd::StopSound { .. } => ServerCmdCode::StopSound, ServerCmd::UpdateColors { .. } => ServerCmdCode::UpdateColors, ServerCmd::Particle { .. } => ServerCmdCode::Particle, ServerCmd::Damage { .. } => ServerCmdCode::Damage, ServerCmd::SpawnStatic { .. } => ServerCmdCode::SpawnStatic, ServerCmd::SpawnBaseline { .. } => ServerCmdCode::SpawnBaseline, ServerCmd::TempEntity { .. } => ServerCmdCode::TempEntity, ServerCmd::SetPause { .. } => ServerCmdCode::SetPause, ServerCmd::SignOnStage { .. } => ServerCmdCode::SignOnStage, ServerCmd::CenterPrint { .. } => ServerCmdCode::CenterPrint, ServerCmd::KilledMonster => ServerCmdCode::KilledMonster, ServerCmd::FoundSecret => ServerCmdCode::FoundSecret, ServerCmd::SpawnStaticSound { .. } => ServerCmdCode::SpawnStaticSound, ServerCmd::Intermission => ServerCmdCode::Intermission, ServerCmd::Finale { .. } => ServerCmdCode::Finale, ServerCmd::CdTrack { .. } => ServerCmdCode::CdTrack, ServerCmd::SellScreen => ServerCmdCode::SellScreen, ServerCmd::Cutscene { .. } => ServerCmdCode::Cutscene, // TODO: figure out a more elegant way of doing this ServerCmd::FastUpdate(_) => panic!("FastUpdate has no code"), }; code as u8 } pub fn deserialize<R>(reader: &mut R) -> Result<Option<ServerCmd>, NetError> where R: BufRead + ReadBytesExt, { let code_num = match reader.read_u8() { Ok(c) => c, Err(ref e) if e.kind() == ::std::io::ErrorKind::UnexpectedEof => return Ok(None), Err(e) => return Err(NetError::from(e)), }; if code_num & FAST_UPDATE_FLAG != 0 { let all_bits; let low_bits = code_num & !FAST_UPDATE_FLAG; if low_bits & UpdateFlags::MORE_BITS.bits() as u8 != 0 { let high_bits = reader.read_u8()?; all_bits = (high_bits as u16) << 8 | low_bits as u16; } else { all_bits = low_bits as u16; } let update_flags = match UpdateFlags::from_bits(all_bits) { Some(u) => u, None => { return Err(NetError::InvalidData(format!( "UpdateFlags: {:b}", all_bits ))) } }; let ent_id; if update_flags.contains(UpdateFlags::LONG_ENTITY) { ent_id = reader.read_u16::<LittleEndian>()?; } else { ent_id = reader.read_u8()? 
as u16; } let model_id; if update_flags.contains(UpdateFlags::MODEL) { model_id = Some(reader.read_u8()?); } else { model_id = None; } let frame_id; if update_flags.contains(UpdateFlags::FRAME) { frame_id = Some(reader.read_u8()?); } else { frame_id = None; } let colormap; if update_flags.contains(UpdateFlags::COLORMAP) { colormap = Some(reader.read_u8()?); } else { colormap = None; } let skin_id; if update_flags.contains(UpdateFlags::SKIN) { skin_id = Some(reader.read_u8()?); } else { skin_id = None; } let effects; if update_flags.contains(UpdateFlags::EFFECTS) { let effects_bits = reader.read_u8()?; effects = match EntityEffects::from_bits(effects_bits) { Some(e) => Some(e), None => { return Err(NetError::InvalidData(format!( "EntityEffects: {:b}", effects_bits ))) } }; } else { effects = None; } let origin_x; if update_flags.contains(UpdateFlags::ORIGIN_X) { origin_x = Some(read_coord(reader)?); } else { origin_x = None; } let pitch; if update_flags.contains(UpdateFlags::PITCH) { pitch = Some(read_angle(reader)?); } else { pitch = None; } let origin_y; if update_flags.contains(UpdateFlags::ORIGIN_Y) { origin_y = Some(read_coord(reader)?); } else { origin_y = None; } let yaw; if update_flags.contains(UpdateFlags::YAW) { yaw = Some(read_angle(reader)?); } else { yaw = None; } let origin_z; if update_flags.contains(UpdateFlags::ORIGIN_Z) { origin_z = Some(read_coord(reader)?); } else { origin_z = None; } let roll; if update_flags.contains(UpdateFlags::ROLL) { roll = Some(read_angle(reader)?); } else { roll = None; } let no_lerp = update_flags.contains(UpdateFlags::NO_LERP); return Ok(Some(ServerCmd::FastUpdate(EntityUpdate { ent_id, model_id, frame_id, colormap, skin_id, effects, origin_x, pitch, origin_y, yaw, origin_z, roll, no_lerp, }))); } let code = match ServerCmdCode::from_u8(code_num) { Some(c) => c, None => { return Err(NetError::InvalidData(format!( "Invalid server command code: {}", code_num ))) } }; let cmd = match code { ServerCmdCode::Bad => ServerCmd::Bad, ServerCmdCode::NoOp => ServerCmd::NoOp, ServerCmdCode::Disconnect => ServerCmd::Disconnect, ServerCmdCode::UpdateStat => { let stat_id = reader.read_u8()?; let stat = match ClientStat::from_u8(stat_id) { Some(c) => c, None => { return Err(NetError::InvalidData(format!( "value for ClientStat: {}", stat_id, ))) } }; let value = reader.read_i32::<LittleEndian>()?; ServerCmd::UpdateStat { stat, value } } ServerCmdCode::Version => { let version = reader.read_i32::<LittleEndian>()?; ServerCmd::Version { version } } ServerCmdCode::SetView => { let ent_id = reader.read_i16::<LittleEndian>()?; ServerCmd::SetView { ent_id } } ServerCmdCode::Sound => { let flags_bits = reader.read_u8()?; let flags = match SoundFlags::from_bits(flags_bits) { Some(f) => f, None => { return Err(NetError::InvalidData(format!( "SoundFlags: {:b}", flags_bits ))) } }; let volume = match flags.contains(SoundFlags::VOLUME) { true => Some(reader.read_u8()?), false => None, }; let attenuation = match flags.contains(SoundFlags::ATTENUATION) { true => Some(reader.read_u8()? 
as f32 * SOUND_ATTENUATION_READ_FACTOR), false => None, }; let entity_channel = reader.read_i16::<LittleEndian>()?; let entity_id = (entity_channel >> 3) as u16; let channel = (entity_channel & 0b111) as i8; let sound_id = reader.read_u8()?; let position = Vector3::new( read_coord(reader)?, read_coord(reader)?, read_coord(reader)?, ); ServerCmd::Sound { volume, attenuation, entity_id, channel, sound_id, position, } } ServerCmdCode::Time => { let time = reader.read_f32::<LittleEndian>()?; ServerCmd::Time { time } } ServerCmdCode::Print => { let text = match util::read_cstring(reader) { Ok(t) => t, Err(e) => return Err(NetError::with_msg(format!("{}", e))), }; ServerCmd::Print { text } } ServerCmdCode::StuffText => { let text = match util::read_cstring(reader) { Ok(t) => t, Err(e) => return Err(NetError::with_msg(format!("{}", e))), }; ServerCmd::StuffText { text } } ServerCmdCode::SetAngle => { let angles = Vector3::new( read_angle(reader)?, read_angle(reader)?, read_angle(reader)?, ); ServerCmd::SetAngle { angles } } ServerCmdCode::ServerInfo => { let protocol_version = reader.read_i32::<LittleEndian>()?; let max_clients = reader.read_u8()?; let game_type_code = reader.read_u8()?; let game_type = match GameType::from_u8(game_type_code) { Some(g) => g, None => { return Err(NetError::InvalidData(format!( "Invalid game type ({})", game_type_code ))) } }; let message = util::read_cstring(reader).unwrap(); let mut model_precache = Vec::new(); loop { let model_name = util::read_cstring(reader).unwrap(); if model_name.is_empty() { break; } model_precache.push(model_name); } let mut sound_precache = Vec::new(); loop { let sound_name = util::read_cstring(reader).unwrap(); if sound_name.is_empty() { break; } sound_precache.push(sound_name); } ServerCmd::ServerInfo { protocol_version, max_clients, game_type, message, model_precache, sound_precache, } } ServerCmdCode::LightStyle => { let id = reader.read_u8()?; let value = util::read_cstring(reader).unwrap(); ServerCmd::LightStyle { id, value } } ServerCmdCode::UpdateName => { let player_id = reader.read_u8()?; let new_name = util::read_cstring(reader).unwrap(); ServerCmd::UpdateName { player_id, new_name, } } ServerCmdCode::UpdateFrags => { let player_id = reader.read_u8()?; let new_frags = reader.read_i16::<LittleEndian>()?; ServerCmd::UpdateFrags { player_id, new_frags, } } ServerCmdCode::PlayerData => { let flags_bits = reader.read_u16::<LittleEndian>()?; let flags = match ClientUpdateFlags::from_bits(flags_bits) { Some(f) => f, None => { return Err(NetError::InvalidData(format!( "client update flags: {:b}", flags_bits ))) } }; let view_height = match flags.contains(ClientUpdateFlags::VIEW_HEIGHT) { true => Some(reader.read_i8()? as f32), false => None, }; let ideal_pitch = match flags.contains(ClientUpdateFlags::IDEAL_PITCH) { true => Some(Deg(reader.read_i8()? as f32)), false => None, }; let punch_pitch = match flags.contains(ClientUpdateFlags::PUNCH_PITCH) { true => Some(Deg(reader.read_i8()? as f32)), false => None, }; let velocity_x = match flags.contains(ClientUpdateFlags::VELOCITY_X) { true => Some(reader.read_i8()? as f32 * VELOCITY_READ_FACTOR), false => None, }; let punch_yaw = match flags.contains(ClientUpdateFlags::PUNCH_YAW) { true => Some(Deg(reader.read_i8()? as f32)), false => None, }; let velocity_y = match flags.contains(ClientUpdateFlags::VELOCITY_Y) { true => Some(reader.read_i8()? 
as f32 * VELOCITY_READ_FACTOR), false => None, }; let punch_roll = match flags.contains(ClientUpdateFlags::PUNCH_ROLL) { true => Some(Deg(reader.read_i8()? as f32)), false => None, }; let velocity_z = match flags.contains(ClientUpdateFlags::VELOCITY_Z) { true => Some(reader.read_i8()? as f32 * VELOCITY_READ_FACTOR), false => None, }; let items_bits = reader.read_u32::<LittleEndian>()?; let items = match ItemFlags::from_bits(items_bits) { Some(i) => i, None => { return Err(NetError::InvalidData(format!( "ItemFlags: {:b}", items_bits ))) } }; let on_ground = flags.contains(ClientUpdateFlags::ON_GROUND); let in_water = flags.contains(ClientUpdateFlags::IN_WATER); let weapon_frame = match flags.contains(ClientUpdateFlags::WEAPON_FRAME) { true => Some(reader.read_u8()?), false => None, }; let armor = match flags.contains(ClientUpdateFlags::ARMOR) { true => Some(reader.read_u8()?), false => None, }; let weapon = match flags.contains(ClientUpdateFlags::WEAPON) { true => Some(reader.read_u8()?), false => None, }; let health = reader.read_i16::<LittleEndian>()?; let ammo = reader.read_u8()?; let ammo_shells = reader.read_u8()?; let ammo_nails = reader.read_u8()?; let ammo_rockets = reader.read_u8()?; let ammo_cells = reader.read_u8()?; let active_weapon = reader.read_u8()?; ServerCmd::PlayerData(PlayerData { view_height, ideal_pitch, punch_pitch, velocity_x, punch_yaw, velocity_y, punch_roll, velocity_z, items, on_ground, in_water, weapon_frame, armor, weapon, health, ammo, ammo_shells, ammo_nails, ammo_rockets, ammo_cells, active_weapon, }) } ServerCmdCode::StopSound => { let entity_channel = reader.read_u16::<LittleEndian>()?; let entity_id = entity_channel >> 3; let channel = (entity_channel & 0b111) as u8; ServerCmd::StopSound { entity_id, channel } } ServerCmdCode::UpdateColors => { let player_id = reader.read_u8()?; let new_colors_bits = reader.read_u8()?; let new_colors = PlayerColor::from_bits(new_colors_bits); ServerCmd::UpdateColors { player_id, new_colors, } } ServerCmdCode::Particle => { let origin = read_coord_vector3(reader)?; let mut direction = Vector3::zero(); for i in 0..3 { direction[i] = reader.read_i8()? 
as f32 * PARTICLE_DIRECTION_READ_FACTOR; } let count = reader.read_u8()?; let color = reader.read_u8()?; ServerCmd::Particle { origin, direction, count, color, } } ServerCmdCode::Damage => { let armor = reader.read_u8()?; let blood = reader.read_u8()?; let source = read_coord_vector3(reader)?; ServerCmd::Damage { armor, blood, source, } } ServerCmdCode::SpawnStatic => { let model_id = reader.read_u8()?; let frame_id = reader.read_u8()?; let colormap = reader.read_u8()?; let skin_id = reader.read_u8()?; let mut origin = Vector3::zero(); let mut angles = Vector3::new(Deg(0.0), Deg(0.0), Deg(0.0)); for i in 0..3 { origin[i] = read_coord(reader)?; angles[i] = read_angle(reader)?; } ServerCmd::SpawnStatic { model_id, frame_id, colormap, skin_id, origin, angles, } } ServerCmdCode::SpawnBaseline => { let ent_id = reader.read_u16::<LittleEndian>()?; let model_id = reader.read_u8()?; let frame_id = reader.read_u8()?; let colormap = reader.read_u8()?; let skin_id = reader.read_u8()?; let mut origin = Vector3::zero(); let mut angles = Vector3::new(Deg(0.0), Deg(0.0), Deg(0.0)); for i in 0..3 { origin[i] = read_coord(reader)?; angles[i] = read_angle(reader)?; } ServerCmd::SpawnBaseline { ent_id, model_id, frame_id, colormap, skin_id, origin, angles, } } ServerCmdCode::TempEntity => { let temp_entity = TempEntity::read_temp_entity(reader)?; ServerCmd::TempEntity { temp_entity } } ServerCmdCode::SetPause => { let paused = match reader.read_u8()? { 0 => false, 1 => true, x => return Err(NetError::InvalidData(format!("setpause: {}", x))), }; ServerCmd::SetPause { paused } } ServerCmdCode::SignOnStage => { let stage_num = reader.read_u8()?; let stage = match SignOnStage::from_u8(stage_num) { Some(s) => s, None => { return Err(NetError::InvalidData(format!( "Invalid value for sign-on stage: {}", stage_num ))) } }; ServerCmd::SignOnStage { stage } } ServerCmdCode::CenterPrint => { let text = match util::read_cstring(reader) { Ok(t) => t, Err(e) => return Err(NetError::with_msg(format!("{}", e))), }; ServerCmd::CenterPrint { text } } ServerCmdCode::KilledMonster => ServerCmd::KilledMonster, ServerCmdCode::FoundSecret => ServerCmd::FoundSecret, ServerCmdCode::SpawnStaticSound => { let origin = read_coord_vector3(reader)?; let sound_id = reader.read_u8()?; let volume = reader.read_u8()?; let attenuation = reader.read_u8()?; ServerCmd::SpawnStaticSound { origin, sound_id, volume, attenuation, } } ServerCmdCode::Intermission => ServerCmd::Intermission, ServerCmdCode::Finale => { let text = match util::read_cstring(reader) { Ok(t) => t, Err(e) => return Err(NetError::with_msg(format!("{}", e))), }; ServerCmd::Finale { text } } ServerCmdCode::CdTrack => { let track = reader.read_u8()?; let loop_ = reader.read_u8()?; ServerCmd::CdTrack { track, loop_ } } ServerCmdCode::SellScreen => ServerCmd::SellScreen, ServerCmdCode::Cutscene => { let text = match util::read_cstring(reader) { Ok(t) => t, Err(e) => return Err(NetError::with_msg(format!("{}", e))), }; ServerCmd::Cutscene { text } } }; Ok(Some(cmd)) } pub fn serialize<W>(&self, writer: &mut W) -> Result<(), NetError> where W: WriteBytesExt, { writer.write_u8(self.code())?; match *self { ServerCmd::Bad | ServerCmd::NoOp | ServerCmd::Disconnect => (), ServerCmd::UpdateStat { stat, value } => { writer.write_u8(stat as u8)?; writer.write_i32::<LittleEndian>(value)?; } ServerCmd::Version { version } => { writer.write_i32::<LittleEndian>(version)?; } ServerCmd::SetView { ent_id } => { writer.write_i16::<LittleEndian>(ent_id)?; } ServerCmd::Sound { volume, attenuation, 
entity_id, channel, sound_id, position, } => { let mut sound_flags = SoundFlags::empty(); if volume.is_some() { sound_flags |= SoundFlags::VOLUME; } if attenuation.is_some() { sound_flags |= SoundFlags::ATTENUATION; } writer.write_u8(sound_flags.bits())?; if let Some(v) = volume { writer.write_u8(v)?; } if let Some(a) = attenuation { writer.write_u8((a * SOUND_ATTENUATION_WRITE_FACTOR as f32) as u8)?; } // TODO: document this better. The entity and channel fields are combined in Sound commands. let ent_channel = (entity_id as i16) << 3 | channel as i16 & 0b111; writer.write_i16::<LittleEndian>(ent_channel)?; writer.write_u8(sound_id)?; for component in 0..3 { write_coord(writer, position[component])?; } } ServerCmd::Time { time } => writer.write_f32::<LittleEndian>(time)?, ServerCmd::Print { ref text } => { writer.write(text.as_bytes())?; writer.write_u8(0)?; } ServerCmd::StuffText { ref text } => { writer.write(text.as_bytes())?; writer.write_u8(0)?; } ServerCmd::SetAngle { angles } => write_angle_vector3(writer, angles)?, ServerCmd::ServerInfo { protocol_version, max_clients, game_type, ref message, ref model_precache, ref sound_precache, } => { writer.write_i32::<LittleEndian>(protocol_version)?; writer.write_u8(max_clients)?; writer.write_u8(game_type as u8)?; writer.write(message.as_bytes())?; writer.write_u8(0)?; for model_name in model_precache.iter() { writer.write(model_name.as_bytes())?; writer.write_u8(0)?; } writer.write_u8(0)?; for sound_name in sound_precache.iter() { writer.write(sound_name.as_bytes())?; writer.write_u8(0)?; } writer.write_u8(0)?; } ServerCmd::LightStyle { id, ref value } => { writer.write_u8(id)?; writer.write(value.as_bytes())?; writer.write_u8(0)?; } ServerCmd::UpdateName { player_id, ref new_name, } => { writer.write_u8(player_id)?; writer.write(new_name.as_bytes())?; writer.write_u8(0)?; } ServerCmd::UpdateFrags { player_id, new_frags, } => { writer.write_u8(player_id)?; writer.write_i16::<LittleEndian>(new_frags)?; } ServerCmd::PlayerData(PlayerData { view_height, ideal_pitch, punch_pitch, velocity_x, punch_yaw, velocity_y, punch_roll, velocity_z, items, on_ground, in_water, weapon_frame, armor, weapon, health, ammo, ammo_shells, ammo_nails, ammo_rockets, ammo_cells, active_weapon, }) => { let mut flags = ClientUpdateFlags::empty(); if view_height.is_some() { flags |= ClientUpdateFlags::VIEW_HEIGHT; } if ideal_pitch.is_some() { flags |= ClientUpdateFlags::IDEAL_PITCH; } if punch_pitch.is_some() { flags |= ClientUpdateFlags::PUNCH_PITCH; } if velocity_x.is_some() { flags |= ClientUpdateFlags::VELOCITY_X; } if punch_yaw.is_some() { flags |= ClientUpdateFlags::PUNCH_YAW; } if velocity_y.is_some() { flags |= ClientUpdateFlags::VELOCITY_Y; } if punch_roll.is_some() { flags |= ClientUpdateFlags::PUNCH_ROLL; } if velocity_z.is_some() { flags |= ClientUpdateFlags::VELOCITY_Z; } // items are always sent flags |= ClientUpdateFlags::ITEMS; if on_ground { flags |= ClientUpdateFlags::ON_GROUND; } if in_water { flags |= ClientUpdateFlags::IN_WATER; } if weapon_frame.is_some() { flags |= ClientUpdateFlags::WEAPON_FRAME; } if armor.is_some() { flags |= ClientUpdateFlags::ARMOR; } if weapon.is_some() { flags |= ClientUpdateFlags::WEAPON; } // write flags writer.write_u16::<LittleEndian>(flags.bits())?; if let Some(vh) = view_height { writer.write_u8(vh as i32 as u8)?; } if let Some(ip) = ideal_pitch { writer.write_u8(ip.0 as i32 as u8)?; } if let Some(pp) = punch_pitch { writer.write_u8(pp.0 as i32 as u8)?; } if let Some(vx) = velocity_x { writer.write_u8((vx * 
VELOCITY_WRITE_FACTOR) as i32 as u8)?; } if let Some(py) = punch_yaw { writer.write_u8(py.0 as i32 as u8)?; } if let Some(vy) = velocity_y { writer.write_u8((vy * VELOCITY_WRITE_FACTOR) as i32 as u8)?; } if let Some(pr) = punch_roll { writer.write_u8(pr.0 as i32 as u8)?; } if let Some(vz) = velocity_z { writer.write_u8((vz * VELOCITY_WRITE_FACTOR) as i32 as u8)?; } writer.write_u32::<LittleEndian>(items.bits())?; if let Some(wf) = weapon_frame { writer.write_u8(wf)?; } if let Some(a) = armor { writer.write_u8(a)?; } if let Some(w) = weapon { writer.write_u8(w)?; } writer.write_i16::<LittleEndian>(health)?; writer.write_u8(ammo)?; writer.write_u8(ammo_shells)?; writer.write_u8(ammo_nails)?; writer.write_u8(ammo_rockets)?; writer.write_u8(ammo_cells)?; writer.write_u8(active_weapon)?; } ServerCmd::StopSound { entity_id, channel } => { let entity_channel = entity_id << 3 | channel as u16 & 0b111; writer.write_u16::<LittleEndian>(entity_channel)?; } ServerCmd::UpdateColors { player_id, new_colors, } => { writer.write_u8(player_id)?; writer.write_u8(new_colors.bits())?; } ServerCmd::Particle { origin, direction, count, color, } => { write_coord_vector3(writer, origin)?; for i in 0..3 { writer.write_i8(match direction[i] * PARTICLE_DIRECTION_WRITE_FACTOR { d if d > ::std::i8::MAX as f32 => ::std::i8::MAX, d if d < ::std::i8::MIN as f32 => ::std::i8::MIN, d => d as i8, })?; } writer.write_u8(count)?; writer.write_u8(color)?; } ServerCmd::Damage { armor, blood, source, } => { writer.write_u8(armor)?; writer.write_u8(blood)?; write_coord_vector3(writer, source)?; } ServerCmd::SpawnStatic { model_id, frame_id, colormap, skin_id, origin, angles, } => { writer.write_u8(model_id)?; writer.write_u8(frame_id)?; writer.write_u8(colormap)?; writer.write_u8(skin_id)?; for i in 0..3 { write_coord(writer, origin[i])?; write_angle(writer, angles[i])?; } } ServerCmd::SpawnBaseline { ent_id, model_id, frame_id, colormap, skin_id, origin, angles, } => { writer.write_u16::<LittleEndian>(ent_id)?; writer.write_u8(model_id)?; writer.write_u8(frame_id)?; writer.write_u8(colormap)?; writer.write_u8(skin_id)?; for i in 0..3 { write_coord(writer, origin[i])?; write_angle(writer, angles[i])?; } } ServerCmd::TempEntity { ref temp_entity } => { temp_entity.write_temp_entity(writer)?; } ServerCmd::SetPause { paused } => { writer.write_u8(match paused { false => 0, true => 1, })?; } ServerCmd::SignOnStage { stage } => { writer.write_u8(stage as u8)?; } ServerCmd::CenterPrint { ref text } => { writer.write(text.as_bytes())?; writer.write_u8(0)?; } ServerCmd::KilledMonster | ServerCmd::FoundSecret => (), ServerCmd::SpawnStaticSound { origin, sound_id, volume, attenuation, } => { write_coord_vector3(writer, origin)?; writer.write_u8(sound_id)?; writer.write_u8(volume)?; writer.write_u8(attenuation)?; } ServerCmd::Intermission => (), ServerCmd::Finale { ref text } => { writer.write(text.as_bytes())?; writer.write_u8(0)?; } ServerCmd::CdTrack { track, loop_ } => { writer.write_u8(track)?; writer.write_u8(loop_)?; } ServerCmd::SellScreen => (), ServerCmd::Cutscene { ref text } => { writer.write(text.as_bytes())?; writer.write_u8(0)?; } // TODO ServerCmd::FastUpdate(_) => unimplemented!(), } Ok(()) } } #[derive(FromPrimitive)] pub enum ClientCmdCode { Bad = 0, NoOp = 1, Disconnect = 2, Move = 3, StringCmd = 4, } #[derive(Debug, PartialEq)] pub enum ClientCmd { Bad, NoOp, Disconnect, Move { send_time: Duration, angles: Vector3<Deg<f32>>, fwd_move: i16, side_move: i16, up_move: i16, button_flags: ButtonFlags, impulse: u8, }, 
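    /// A command string, sent null-terminated; in the classic Quake protocol
    /// this is passed to the remote end's console command interpreter.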
StringCmd { cmd: String, }, } impl ClientCmd { pub fn code(&self) -> u8 { match *self { ClientCmd::Bad => ClientCmdCode::Bad as u8, ClientCmd::NoOp => ClientCmdCode::NoOp as u8, ClientCmd::Disconnect => ClientCmdCode::Disconnect as u8, ClientCmd::Move { .. } => ClientCmdCode::Move as u8, ClientCmd::StringCmd { .. } => ClientCmdCode::StringCmd as u8, } } pub fn deserialize<R>(reader: &mut R) -> Result<ClientCmd, NetError> where R: ReadBytesExt + BufRead, { let code_val = reader.read_u8()?; let code = match ClientCmdCode::from_u8(code_val) { Some(c) => c, None => { return Err(NetError::InvalidData(format!( "Invalid client command code: {}", code_val ))) } }; let cmd = match code { ClientCmdCode::Bad => ClientCmd::Bad, ClientCmdCode::NoOp => ClientCmd::NoOp, ClientCmdCode::Disconnect => ClientCmd::Disconnect, ClientCmdCode::Move => { let send_time = engine::duration_from_f32(reader.read_f32::<LittleEndian>()?); let angles = Vector3::new( read_angle(reader)?, read_angle(reader)?, read_angle(reader)?, ); let fwd_move = reader.read_i16::<LittleEndian>()?; let side_move = reader.read_i16::<LittleEndian>()?; let up_move = reader.read_i16::<LittleEndian>()?; let button_flags_val = reader.read_u8()?; let button_flags = match ButtonFlags::from_bits(button_flags_val) { Some(bf) => bf, None => { return Err(NetError::InvalidData(format!( "Invalid value for button flags: {}", button_flags_val ))) } }; let impulse = reader.read_u8()?; ClientCmd::Move { send_time, angles, fwd_move, side_move, up_move, button_flags, impulse, } } ClientCmdCode::StringCmd => { let cmd = util::read_cstring(reader).unwrap(); ClientCmd::StringCmd { cmd } } }; Ok(cmd) } pub fn serialize<W>(&self, writer: &mut W) -> Result<(), NetError> where W: WriteBytesExt, { writer.write_u8(self.code())?; match *self { ClientCmd::Bad => (), ClientCmd::NoOp => (), ClientCmd::Disconnect => (), ClientCmd::Move { send_time, angles, fwd_move, side_move, up_move, button_flags, impulse, } => { writer.write_f32::<LittleEndian>(engine::duration_to_f32(send_time))?; write_angle_vector3(writer, angles)?; writer.write_i16::<LittleEndian>(fwd_move)?; writer.write_i16::<LittleEndian>(side_move)?; writer.write_i16::<LittleEndian>(up_move)?; writer.write_u8(button_flags.bits())?; writer.write_u8(impulse)?; } ClientCmd::StringCmd { ref cmd } => { writer.write(cmd.as_bytes())?; writer.write_u8(0)?; } } Ok(()) } } #[derive(PartialEq)] pub enum BlockingMode { Blocking, NonBlocking, Timeout(Duration), } pub struct QSocket { socket: UdpSocket, remote: SocketAddr, unreliable_send_sequence: u32, unreliable_recv_sequence: u32, ack_sequence: u32, send_sequence: u32, send_queue: VecDeque<Box<[u8]>>, send_cache: Box<[u8]>, send_next: bool, send_count: usize, resend_count: usize, recv_sequence: u32, recv_buf: [u8; MAX_MESSAGE], } impl QSocket { pub fn new(socket: UdpSocket, remote: SocketAddr) -> QSocket { QSocket { socket, remote, unreliable_send_sequence: 0, unreliable_recv_sequence: 0, ack_sequence: 0, send_sequence: 0, send_queue: VecDeque::new(), send_cache: Box::new([]), send_count: 0, send_next: false, resend_count: 0, recv_sequence: 0, recv_buf: [0; MAX_MESSAGE], } } pub fn can_send(&self) -> bool { self.send_queue.is_empty() && self.send_cache.is_empty() } /// Begin sending a reliable message over this socket. 
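    ///
    /// The message is split into chunks of at most `MAX_DATAGRAM` bytes. The
    /// first chunk is sent immediately; each subsequent chunk is sent only
    /// after the previous one has been ACKed (see `send_msg_next` and the ACK
    /// handling in `recv_msg`).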
pub fn begin_send_msg(&mut self, msg: &[u8]) -> Result<(), NetError> { // make sure all reliable messages have been ACKed in their entirety if !self.send_queue.is_empty() { return Err(NetError::with_msg( "begin_send_msg: previous message unacknowledged", )); } // empty messages are an error if msg.len() == 0 { return Err(NetError::with_msg( "begin_send_msg: Input data has zero length", )); } // check upper message length bound if msg.len() > MAX_MESSAGE { return Err(NetError::with_msg( "begin_send_msg: Input data exceeds MAX_MESSAGE", )); } // split the message into chunks and enqueue them for chunk in msg.chunks(MAX_DATAGRAM) { self.send_queue .push_back(chunk.to_owned().into_boxed_slice()); } // send the first chunk self.send_msg_next()?; Ok(()) } /// Resend the last reliable message packet. pub fn resend_msg(&mut self) -> Result<(), NetError> { if self.send_cache.is_empty() { Err(NetError::with_msg("Attempted resend with empty send cache")) } else { self.socket.send_to(&self.send_cache, self.remote)?; self.resend_count += 1; Ok(()) } } /// Send the next segment of a reliable message. pub fn send_msg_next(&mut self) -> Result<(), NetError> { // grab the first chunk in the queue let content = self .send_queue .pop_front() .expect("Send queue is empty (this is a bug)"); // if this was the last chunk, set the EOM flag let msg_kind = match self.send_queue.is_empty() { true => MsgKind::ReliableEom, false => MsgKind::Reliable, }; // compose the packet let mut compose = Vec::with_capacity(MAX_PACKET); compose.write_u16::<NetworkEndian>(msg_kind as u16)?; compose.write_u16::<NetworkEndian>((HEADER_SIZE + content.len()) as u16)?; compose.write_u32::<NetworkEndian>(self.send_sequence)?; compose.write_all(&content)?; // store packet to send cache self.send_cache = compose.into_boxed_slice(); // increment send sequence self.send_sequence += 1; // send the composed packet self.socket.send_to(&self.send_cache, self.remote)?; // TODO: update send time // bump send count self.send_count += 1; // don't send the next chunk until this one gets ACKed self.send_next = false; Ok(()) } pub fn send_msg_unreliable(&mut self, content: &[u8]) -> Result<(), NetError> { if content.len() == 0 { return Err(NetError::with_msg("Unreliable message has zero length")); } if content.len() > MAX_DATAGRAM { return Err(NetError::with_msg( "Unreliable message length exceeds MAX_DATAGRAM", )); } let packet_len = HEADER_SIZE + content.len(); // compose the packet let mut packet = Vec::with_capacity(MAX_PACKET); packet.write_u16::<NetworkEndian>(MsgKind::Unreliable as u16)?; packet.write_u16::<NetworkEndian>(packet_len as u16)?; packet.write_u32::<NetworkEndian>(self.unreliable_send_sequence)?; packet.write_all(content)?; // increment unreliable send sequence self.unreliable_send_sequence += 1; // send the message self.socket.send_to(&packet, self.remote)?; // bump send count self.send_count += 1; Ok(()) } /// Receive a message on this socket. 
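    ///
    /// Packets that do not originate from `self.remote` are dropped.
    /// Unreliable messages are returned as soon as they arrive; reliable
    /// chunks are ACKed individually and accumulated until a `ReliableEom`
    /// packet completes the message. In `BlockingMode::NonBlocking` (or when
    /// a timeout elapses), an empty `Vec` is returned if no packet is
    /// available.
    ///
    /// A typical polling loop might look like the following sketch (`qsock`
    /// is an assumed, already-connected `QSocket`):
    ///
    /// ```ignore
    /// let msg = qsock.recv_msg(BlockingMode::Timeout(Duration::milliseconds(250)))?;
    /// if !msg.is_empty() {
    ///     // parse commands out of `msg`, e.g. via ServerCmd::deserialize
    /// }
    /// ```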
// TODO: the flow control in this function is completely baffling, make it a little less awful pub fn recv_msg(&mut self, block: BlockingMode) -> Result<Vec<u8>, NetError> { let mut msg = Vec::new(); match block { BlockingMode::Blocking => { self.socket.set_nonblocking(false)?; self.socket.set_read_timeout(None)?; } BlockingMode::NonBlocking => { self.socket.set_nonblocking(true)?; self.socket.set_read_timeout(None)?; } BlockingMode::Timeout(d) => { self.socket.set_nonblocking(false)?; self.socket.set_read_timeout(Some(d.to_std().unwrap()))?; } } loop { let (packet_len, src_addr) = match self.socket.recv_from(&mut self.recv_buf) { Ok(x) => x, Err(e) => { use std::io::ErrorKind; match e.kind() { // these errors are expected in nonblocking mode ErrorKind::WouldBlock | ErrorKind::TimedOut => return Ok(Vec::new()), _ => return Err(NetError::from(e)), } } }; if src_addr != self.remote { // this packet didn't come from remote, drop it debug!( "forged packet (src_addr was {}, should be {})", src_addr, self.remote ); continue; } let mut reader = BufReader::new(Cursor::new(&self.recv_buf[..packet_len])); let msg_kind_code = reader.read_u16::<NetworkEndian>()?; let msg_kind = match MsgKind::from_u16(msg_kind_code) { Some(f) => f, None => { return Err(NetError::InvalidData(format!( "Invalid message kind: {}", msg_kind_code ))) } }; if packet_len < HEADER_SIZE { // TODO: increment short packet count debug!("short packet"); continue; } let field_len = reader.read_u16::<NetworkEndian>()?; if field_len as usize != packet_len { return Err(NetError::InvalidData(format!( "Length field and actual length differ ({} != {})", field_len, packet_len ))); } let sequence; if msg_kind != MsgKind::Ctl { sequence = reader.read_u32::<NetworkEndian>()?; } else { sequence = 0; } match msg_kind { // ignore control messages MsgKind::Ctl => (), MsgKind::Unreliable => { // we've received a newer datagram, ignore if sequence < self.unreliable_recv_sequence { println!("Stale datagram with sequence # {}", sequence); break; } // we've skipped some datagrams, count them as dropped if sequence > self.unreliable_recv_sequence { let drop_count = sequence - self.unreliable_recv_sequence; println!( "Dropped {} packet(s) ({} -> {})", drop_count, sequence, self.unreliable_recv_sequence ); } self.unreliable_recv_sequence = sequence + 1; // copy the rest of the packet into the message buffer and return reader.read_to_end(&mut msg)?; return Ok(msg); } MsgKind::Ack => { if sequence != self.send_sequence - 1 { println!("Stale ACK received"); } else if sequence != self.ack_sequence { println!("Duplicate ACK received"); } else { self.ack_sequence += 1; if self.ack_sequence != self.send_sequence { return Err(NetError::with_msg("ACK sequencing error")); } // our last reliable message has been acked if self.send_queue.is_empty() { // the whole message is through, clear the send cache self.send_cache = Box::new([]); } else { // send the next chunk before returning self.send_next = true; } } } // TODO: once we start reading a reliable message, don't allow other packets until // we have the whole thing MsgKind::Reliable | MsgKind::ReliableEom => { // send ack message and increment self.recv_sequence let mut ack_buf: [u8; HEADER_SIZE] = [0; HEADER_SIZE]; let mut ack_curs = Cursor::new(&mut ack_buf[..]); ack_curs.write_u16::<NetworkEndian>(MsgKind::Ack as u16)?; ack_curs.write_u16::<NetworkEndian>(HEADER_SIZE as u16)?; ack_curs.write_u32::<NetworkEndian>(sequence)?; self.socket.send_to(ack_curs.into_inner(), self.remote)?; // if this was a duplicate, 
drop it if sequence != self.recv_sequence { println!("Duplicate message received"); continue; } self.recv_sequence += 1; reader.read_to_end(&mut msg)?; // if this is the last chunk of a reliable message, break out and return if msg_kind == MsgKind::ReliableEom { break; } } } } if self.send_next { self.send_msg_next()?; } Ok(msg) } } fn read_coord<R>(reader: &mut R) -> Result<f32, NetError> where R: BufRead + ReadBytesExt, { Ok(reader.read_i16::<LittleEndian>()? as f32 / 8.0) } fn read_coord_vector3<R>(reader: &mut R) -> Result<Vector3<f32>, NetError> where R: BufRead + ReadBytesExt, { Ok(Vector3::new( read_coord(reader)?, read_coord(reader)?, read_coord(reader)?, )) } fn write_coord<W>(writer: &mut W, coord: f32) -> Result<(), NetError> where W: WriteBytesExt, { writer.write_i16::<LittleEndian>((coord * 8.0) as i16)?; Ok(()) } fn write_coord_vector3<W>(writer: &mut W, coords: Vector3<f32>) -> Result<(), NetError> where W: WriteBytesExt, { for coord in &coords[..] { write_coord(writer, *coord)?; } Ok(()) } fn read_angle<R>(reader: &mut R) -> Result<Deg<f32>, NetError> where R: BufRead + ReadBytesExt, { Ok(Deg(reader.read_i8()? as f32 * (360.0 / 256.0))) } fn read_angle_vector3<R>(reader: &mut R) -> Result<Vector3<Deg<f32>>, NetError> where R: BufRead + ReadBytesExt, { Ok(Vector3::new( read_angle(reader)?, read_angle(reader)?, read_angle(reader)?, )) } fn write_angle<W>(writer: &mut W, angle: Deg<f32>) -> Result<(), NetError> where W: WriteBytesExt, { writer.write_u8(((angle.0 as i32 * 256 / 360) & 0xFF) as u8)?; Ok(()) } fn write_angle_vector3<W>(writer: &mut W, angles: Vector3<Deg<f32>>) -> Result<(), NetError> where W: WriteBytesExt, { for angle in &angles[..] { write_angle(writer, *angle)?; } Ok(()) } #[cfg(test)] mod test { use super::*; use std::io::BufReader; #[test] fn test_server_cmd_update_stat_read_write_eq() { let src = ServerCmd::UpdateStat { stat: ClientStat::Nails, value: 64, }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_version_read_write_eq() { let src = ServerCmd::Version { version: 42 }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_set_view_read_write_eq() { let src = ServerCmd::SetView { ent_id: 17 }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_time_read_write_eq() { let src = ServerCmd::Time { time: 23.07 }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_print_read_write_eq() { let src = ServerCmd::Print { text: String::from("print test"), }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_stuff_text_read_write_eq() { let src = ServerCmd::StuffText { text: String::from("stufftext test"), }; let mut packet = Vec::new(); src.serialize(&mut 
packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_server_info_read_write_eq() { let src = ServerCmd::ServerInfo { protocol_version: 42, max_clients: 16, game_type: GameType::Deathmatch, message: String::from("Test message"), model_precache: vec![String::from("test1.bsp"), String::from("test2.bsp")], sound_precache: vec![String::from("test1.wav"), String::from("test2.wav")], }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_light_style_read_write_eq() { let src = ServerCmd::LightStyle { id: 11, value: String::from("aaaaabcddeefgghjjjkaaaazzzzyxwaaaba"), }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_update_name_read_write_eq() { let src = ServerCmd::UpdateName { player_id: 7, new_name: String::from("newname"), }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_update_frags_read_write_eq() { let src = ServerCmd::UpdateFrags { player_id: 7, new_frags: 11, }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_stop_sound_read_write_eq() { let src = ServerCmd::StopSound { entity_id: 17, channel: 3, }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_update_colors_read_write_eq() { let src = ServerCmd::UpdateColors { player_id: 11, new_colors: PlayerColor::new(4, 13), }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_set_pause_read_write_eq() { let src = ServerCmd::SetPause { paused: true }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_sign_on_stage_read_write_eq() { let src = ServerCmd::SignOnStage { stage: SignOnStage::Begin, }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_center_print_read_write_eq() { let src = ServerCmd::CenterPrint { text: String::from("Center print test"), }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_finale_read_write_eq() { let src = ServerCmd::Finale { text: String::from("Finale 
test"), }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_cd_track_read_write_eq() { let src = ServerCmd::CdTrack { track: 5, loop_: 1 }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_server_cmd_cutscene_read_write_eq() { let src = ServerCmd::Cutscene { text: String::from("Cutscene test"), }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ServerCmd::deserialize(&mut reader).unwrap().unwrap(); assert_eq!(src, dst); } #[test] fn test_client_cmd_string_cmd_read_write_eq() { let src = ClientCmd::StringCmd { cmd: String::from("StringCmd test"), }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ClientCmd::deserialize(&mut reader).unwrap(); assert_eq!(src, dst); } #[test] fn test_client_cmd_move_read_write_eq() { let src = ClientCmd::Move { send_time: Duration::milliseconds(1234), // have to use angles that won't lose precision from write_angle angles: Vector3::new(Deg(90.0), Deg(-90.0), Deg(0.0)), fwd_move: 27, side_move: 85, up_move: 76, button_flags: ButtonFlags::empty(), impulse: 121, }; let mut packet = Vec::new(); src.serialize(&mut packet).unwrap(); let mut reader = BufReader::new(packet.as_slice()); let dst = ClientCmd::deserialize(&mut reader).unwrap(); assert_eq!(src, dst); } fn gen_qsocket_pair() -> (QSocket, QSocket) { let src_udp = UdpSocket::bind("localhost:0").unwrap(); let src_addr = src_udp.local_addr().unwrap(); let dst_udp = UdpSocket::bind("localhost:0").unwrap(); let dst_addr = dst_udp.local_addr().unwrap(); ( QSocket::new(src_udp, dst_addr), QSocket::new(dst_udp, src_addr), ) } #[test] fn test_qsocket_send_msg_short() { let (mut src, mut dst) = gen_qsocket_pair(); let message = String::from("test message").into_bytes(); src.begin_send_msg(&message).unwrap(); let received = dst.recv_msg(BlockingMode::Blocking).unwrap(); assert_eq!(message, received); // TODO: assert can_send == true, send_next == false, etc } #[test] fn test_qsocket_send_msg_unreliable_recv_msg_eq() { let (mut src, mut dst) = gen_qsocket_pair(); let message = String::from("test message").into_bytes(); src.send_msg_unreliable(&message).unwrap(); let received = dst.recv_msg(BlockingMode::Blocking).unwrap(); assert_eq!(message, received); } #[test] #[should_panic] fn test_qsocket_send_msg_unreliable_zero_length_fails() { let (mut src, _) = gen_qsocket_pair(); let message = []; src.send_msg_unreliable(&message).unwrap(); } #[test] #[should_panic] fn test_qsocket_send_msg_unreliable_exceeds_max_length_fails() { let (mut src, _) = gen_qsocket_pair(); let message = [0; MAX_DATAGRAM + 1]; src.send_msg_unreliable(&message).unwrap(); } }<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::F5R2 { #[doc = r" Modifies the contents of the register"] #[inline(always)] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline(always)] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline(always)] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline(always)] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct FB0R { bits: bool, } impl FB0R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB1R { bits: bool, } impl FB1R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB2R { bits: bool, } impl FB2R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB3R { bits: bool, } impl FB3R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB4R { bits: bool, } impl FB4R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB5R { bits: bool, } impl FB5R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB6R { bits: bool, } impl FB6R { #[doc = r" Value of the field as raw bits"] 
#[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB7R { bits: bool, } impl FB7R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB8R { bits: bool, } impl FB8R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB9R { bits: bool, } impl FB9R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB10R { bits: bool, } impl FB10R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB11R { bits: bool, } impl FB11R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB12R { bits: bool, } impl FB12R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB13R { bits: bool, } impl FB13R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB14R { bits: bool, } impl FB14R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if 
the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB15R { bits: bool, } impl FB15R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB16R { bits: bool, } impl FB16R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB17R { bits: bool, } impl FB17R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB18R { bits: bool, } impl FB18R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB19R { bits: bool, } impl FB19R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB20R { bits: bool, } impl FB20R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB21R { bits: bool, } impl FB21R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB22R { bits: bool, } impl FB22R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB23R { bits: bool, } impl FB23R { #[doc = r" Value of the field as 
raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB24R { bits: bool, } impl FB24R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB25R { bits: bool, } impl FB25R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB26R { bits: bool, } impl FB26R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB27R { bits: bool, } impl FB27R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB28R { bits: bool, } impl FB28R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB29R { bits: bool, } impl FB29R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB30R { bits: bool, } impl FB30R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct FB31R { bits: bool, } impl FB31R { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" 
Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Proxy"] pub struct _FB0W<'a> { w: &'a mut W, } impl<'a> _FB0W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB1W<'a> { w: &'a mut W, } impl<'a> _FB1W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB2W<'a> { w: &'a mut W, } impl<'a> _FB2W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB3W<'a> { w: &'a mut W, } impl<'a> _FB3W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"]<|fim▁hole|> self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB4W<'a> { w: &'a mut W, } impl<'a> _FB4W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB5W<'a> { w: &'a mut W, } impl<'a> _FB5W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB6W<'a> { w: &'a mut W, } impl<'a> _FB6W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 6; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as 
u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB7W<'a> { w: &'a mut W, } impl<'a> _FB7W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 7; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB8W<'a> { w: &'a mut W, } impl<'a> _FB8W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB9W<'a> { w: &'a mut W, } impl<'a> _FB9W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 9; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB10W<'a> { w: &'a mut W, } impl<'a> _FB10W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 10; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB11W<'a> { w: &'a mut W, } impl<'a> _FB11W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 11; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB12W<'a> { w: &'a mut W, } impl<'a> _FB12W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 12; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB13W<'a> { w: &'a mut W, } impl<'a> _FB13W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 13; self.w.bits &= !((MASK as u32) << OFFSET); 
self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB14W<'a> { w: &'a mut W, } impl<'a> _FB14W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 14; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB15W<'a> { w: &'a mut W, } impl<'a> _FB15W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 15; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB16W<'a> { w: &'a mut W, } impl<'a> _FB16W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB17W<'a> { w: &'a mut W, } impl<'a> _FB17W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 17; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB18W<'a> { w: &'a mut W, } impl<'a> _FB18W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 18; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB19W<'a> { w: &'a mut W, } impl<'a> _FB19W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 19; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB20W<'a> { w: &'a mut W, } impl<'a> _FB20W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 20; 
self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB21W<'a> { w: &'a mut W, } impl<'a> _FB21W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 21; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB22W<'a> { w: &'a mut W, } impl<'a> _FB22W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 22; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB23W<'a> { w: &'a mut W, } impl<'a> _FB23W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 23; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB24W<'a> { w: &'a mut W, } impl<'a> _FB24W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB25W<'a> { w: &'a mut W, } impl<'a> _FB25W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 25; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB26W<'a> { w: &'a mut W, } impl<'a> _FB26W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 26; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB27W<'a> { w: &'a mut W, } impl<'a> _FB27W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: 
bool = true; const OFFSET: u8 = 27; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB28W<'a> { w: &'a mut W, } impl<'a> _FB28W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 28; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB29W<'a> { w: &'a mut W, } impl<'a> _FB29W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 29; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB30W<'a> { w: &'a mut W, } impl<'a> _FB30W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 30; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FB31W<'a> { w: &'a mut W, } impl<'a> _FB31W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 31; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline(always)] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 0 - Filter bits"] #[inline(always)] pub fn fb0(&self) -> FB0R { let bits = { const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB0R { bits } } #[doc = "Bit 1 - Filter bits"] #[inline(always)] pub fn fb1(&self) -> FB1R { let bits = { const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB1R { bits } } #[doc = "Bit 2 - Filter bits"] #[inline(always)] pub fn fb2(&self) -> FB2R { let bits = { const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB2R { bits } } #[doc = "Bit 3 - Filter bits"] #[inline(always)] pub fn fb3(&self) -> FB3R { let bits = { const MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB3R { bits } } #[doc = "Bit 4 - Filter bits"] #[inline(always)] pub fn fb4(&self) -> FB4R { let bits = { const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB4R { bits } } #[doc = "Bit 5 - Filter bits"] #[inline(always)] pub fn fb5(&self) -> FB5R { let bits = { const MASK: bool = true; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB5R { bits } } #[doc = "Bit 6 - Filter bits"] #[inline(always)] pub 
fn fb6(&self) -> FB6R { let bits = { const MASK: bool = true; const OFFSET: u8 = 6; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB6R { bits } } #[doc = "Bit 7 - Filter bits"] #[inline(always)] pub fn fb7(&self) -> FB7R { let bits = { const MASK: bool = true; const OFFSET: u8 = 7; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB7R { bits } } #[doc = "Bit 8 - Filter bits"] #[inline(always)] pub fn fb8(&self) -> FB8R { let bits = { const MASK: bool = true; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB8R { bits } } #[doc = "Bit 9 - Filter bits"] #[inline(always)] pub fn fb9(&self) -> FB9R { let bits = { const MASK: bool = true; const OFFSET: u8 = 9; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB9R { bits } } #[doc = "Bit 10 - Filter bits"] #[inline(always)] pub fn fb10(&self) -> FB10R { let bits = { const MASK: bool = true; const OFFSET: u8 = 10; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB10R { bits } } #[doc = "Bit 11 - Filter bits"] #[inline(always)] pub fn fb11(&self) -> FB11R { let bits = { const MASK: bool = true; const OFFSET: u8 = 11; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB11R { bits } } #[doc = "Bit 12 - Filter bits"] #[inline(always)] pub fn fb12(&self) -> FB12R { let bits = { const MASK: bool = true; const OFFSET: u8 = 12; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB12R { bits } } #[doc = "Bit 13 - Filter bits"] #[inline(always)] pub fn fb13(&self) -> FB13R { let bits = { const MASK: bool = true; const OFFSET: u8 = 13; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB13R { bits } } #[doc = "Bit 14 - Filter bits"] #[inline(always)] pub fn fb14(&self) -> FB14R { let bits = { const MASK: bool = true; const OFFSET: u8 = 14; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB14R { bits } } #[doc = "Bit 15 - Filter bits"] #[inline(always)] pub fn fb15(&self) -> FB15R { let bits = { const MASK: bool = true; const OFFSET: u8 = 15; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB15R { bits } } #[doc = "Bit 16 - Filter bits"] #[inline(always)] pub fn fb16(&self) -> FB16R { let bits = { const MASK: bool = true; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB16R { bits } } #[doc = "Bit 17 - Filter bits"] #[inline(always)] pub fn fb17(&self) -> FB17R { let bits = { const MASK: bool = true; const OFFSET: u8 = 17; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB17R { bits } } #[doc = "Bit 18 - Filter bits"] #[inline(always)] pub fn fb18(&self) -> FB18R { let bits = { const MASK: bool = true; const OFFSET: u8 = 18; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB18R { bits } } #[doc = "Bit 19 - Filter bits"] #[inline(always)] pub fn fb19(&self) -> FB19R { let bits = { const MASK: bool = true; const OFFSET: u8 = 19; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB19R { bits } } #[doc = "Bit 20 - Filter bits"] #[inline(always)] pub fn fb20(&self) -> FB20R { let bits = { const MASK: bool = true; const OFFSET: u8 = 20; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB20R { bits } } #[doc = "Bit 21 - Filter bits"] #[inline(always)] pub fn fb21(&self) -> FB21R { let bits = { const MASK: bool = true; const OFFSET: u8 = 21; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB21R { bits } } #[doc = "Bit 22 - Filter bits"] #[inline(always)] pub fn fb22(&self) -> FB22R { let bits = { const MASK: bool = true; const OFFSET: u8 = 22; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB22R { bits } } #[doc = "Bit 23 - Filter bits"] #[inline(always)] pub fn fb23(&self) -> FB23R { let bits = { const MASK: bool = true; const OFFSET: u8 = 23; ((self.bits >> 
OFFSET) & MASK as u32) != 0 }; FB23R { bits } } #[doc = "Bit 24 - Filter bits"] #[inline(always)] pub fn fb24(&self) -> FB24R { let bits = { const MASK: bool = true; const OFFSET: u8 = 24; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB24R { bits } } #[doc = "Bit 25 - Filter bits"] #[inline(always)] pub fn fb25(&self) -> FB25R { let bits = { const MASK: bool = true; const OFFSET: u8 = 25; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB25R { bits } } #[doc = "Bit 26 - Filter bits"] #[inline(always)] pub fn fb26(&self) -> FB26R { let bits = { const MASK: bool = true; const OFFSET: u8 = 26; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB26R { bits } } #[doc = "Bit 27 - Filter bits"] #[inline(always)] pub fn fb27(&self) -> FB27R { let bits = { const MASK: bool = true; const OFFSET: u8 = 27; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB27R { bits } } #[doc = "Bit 28 - Filter bits"] #[inline(always)] pub fn fb28(&self) -> FB28R { let bits = { const MASK: bool = true; const OFFSET: u8 = 28; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB28R { bits } } #[doc = "Bit 29 - Filter bits"] #[inline(always)] pub fn fb29(&self) -> FB29R { let bits = { const MASK: bool = true; const OFFSET: u8 = 29; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB29R { bits } } #[doc = "Bit 30 - Filter bits"] #[inline(always)] pub fn fb30(&self) -> FB30R { let bits = { const MASK: bool = true; const OFFSET: u8 = 30; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB30R { bits } } #[doc = "Bit 31 - Filter bits"] #[inline(always)] pub fn fb31(&self) -> FB31R { let bits = { const MASK: bool = true; const OFFSET: u8 = 31; ((self.bits >> OFFSET) & MASK as u32) != 0 }; FB31R { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline(always)] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - Filter bits"] #[inline(always)] pub fn fb0(&mut self) -> _FB0W { _FB0W { w: self } } #[doc = "Bit 1 - Filter bits"] #[inline(always)] pub fn fb1(&mut self) -> _FB1W { _FB1W { w: self } } #[doc = "Bit 2 - Filter bits"] #[inline(always)] pub fn fb2(&mut self) -> _FB2W { _FB2W { w: self } } #[doc = "Bit 3 - Filter bits"] #[inline(always)] pub fn fb3(&mut self) -> _FB3W { _FB3W { w: self } } #[doc = "Bit 4 - Filter bits"] #[inline(always)] pub fn fb4(&mut self) -> _FB4W { _FB4W { w: self } } #[doc = "Bit 5 - Filter bits"] #[inline(always)] pub fn fb5(&mut self) -> _FB5W { _FB5W { w: self } } #[doc = "Bit 6 - Filter bits"] #[inline(always)] pub fn fb6(&mut self) -> _FB6W { _FB6W { w: self } } #[doc = "Bit 7 - Filter bits"] #[inline(always)] pub fn fb7(&mut self) -> _FB7W { _FB7W { w: self } } #[doc = "Bit 8 - Filter bits"] #[inline(always)] pub fn fb8(&mut self) -> _FB8W { _FB8W { w: self } } #[doc = "Bit 9 - Filter bits"] #[inline(always)] pub fn fb9(&mut self) -> _FB9W { _FB9W { w: self } } #[doc = "Bit 10 - Filter bits"] #[inline(always)] pub fn fb10(&mut self) -> _FB10W { _FB10W { w: self } } #[doc = "Bit 11 - Filter bits"] #[inline(always)] pub fn fb11(&mut self) -> _FB11W { _FB11W { w: self } } #[doc = "Bit 12 - Filter bits"] #[inline(always)] pub fn fb12(&mut self) -> _FB12W { _FB12W { w: self } } #[doc = "Bit 13 - Filter bits"] #[inline(always)] pub fn fb13(&mut self) -> _FB13W { _FB13W { w: self } } #[doc = "Bit 14 - Filter bits"] #[inline(always)] pub fn fb14(&mut self) -> _FB14W { _FB14W { w: self } } #[doc = "Bit 15 - Filter bits"] #[inline(always)] pub fn fb15(&mut 
self) -> _FB15W { _FB15W { w: self } } #[doc = "Bit 16 - Filter bits"] #[inline(always)] pub fn fb16(&mut self) -> _FB16W { _FB16W { w: self } } #[doc = "Bit 17 - Filter bits"] #[inline(always)] pub fn fb17(&mut self) -> _FB17W { _FB17W { w: self } } #[doc = "Bit 18 - Filter bits"] #[inline(always)] pub fn fb18(&mut self) -> _FB18W { _FB18W { w: self } } #[doc = "Bit 19 - Filter bits"] #[inline(always)] pub fn fb19(&mut self) -> _FB19W { _FB19W { w: self } } #[doc = "Bit 20 - Filter bits"] #[inline(always)] pub fn fb20(&mut self) -> _FB20W { _FB20W { w: self } } #[doc = "Bit 21 - Filter bits"] #[inline(always)] pub fn fb21(&mut self) -> _FB21W { _FB21W { w: self } } #[doc = "Bit 22 - Filter bits"] #[inline(always)] pub fn fb22(&mut self) -> _FB22W { _FB22W { w: self } } #[doc = "Bit 23 - Filter bits"] #[inline(always)] pub fn fb23(&mut self) -> _FB23W { _FB23W { w: self } } #[doc = "Bit 24 - Filter bits"] #[inline(always)] pub fn fb24(&mut self) -> _FB24W { _FB24W { w: self } } #[doc = "Bit 25 - Filter bits"] #[inline(always)] pub fn fb25(&mut self) -> _FB25W { _FB25W { w: self } } #[doc = "Bit 26 - Filter bits"] #[inline(always)] pub fn fb26(&mut self) -> _FB26W { _FB26W { w: self } } #[doc = "Bit 27 - Filter bits"] #[inline(always)] pub fn fb27(&mut self) -> _FB27W { _FB27W { w: self } } #[doc = "Bit 28 - Filter bits"] #[inline(always)] pub fn fb28(&mut self) -> _FB28W { _FB28W { w: self } } #[doc = "Bit 29 - Filter bits"] #[inline(always)] pub fn fb29(&mut self) -> _FB29W { _FB29W { w: self } } #[doc = "Bit 30 - Filter bits"] #[inline(always)] pub fn fb30(&mut self) -> _FB30W { _FB30W { w: self } } #[doc = "Bit 31 - Filter bits"] #[inline(always)] pub fn fb31(&mut self) -> _FB31W { _FB31W { w: self } } }<|fim▁end|>
#[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 3;
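A minimal usage sketch for the generated reader/writer proxies in the sample above, assuming the usual svd2rust conventions: `CAN` is a hypothetical peripheral struct exposing this filter-bank register as `fr1` with the standard `read`/`write`/`modify` wrappers. Those identifiers, and the reader helper `bit_is_set()`, do not appear in the sample itself and are assumptions; only the `FBxR`/`FBxW` proxy API is taken from the generated code.

// Hedged sketch: `can` and `fr1` are assumed names, and read()/write()/modify()
// follow the standard svd2rust register shape; a sketch, not the sample's API.
fn set_filter_bits(can: &CAN) {
    // write(): start from the reset value (bits == 0), then set FB3 and FB28.
    // Each proxy returns &mut W, so the calls chain.
    can.fr1.write(|w| w.fb3().set_bit().fb28().set_bit());

    // modify(): read-modify-write that clears FB28 while preserving all other
    // filter bits, using the mask/offset logic shown in the generated proxies.
    can.fr1.modify(|_r, w| w.fb28().clear_bit());

    // Reading a single bit back, assuming the standard svd2rust reader helper.
    let fb3_set = can.fr1.read().fb3().bit_is_set();
    let _ = fb3_set;
}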
<|file_name|>use_sax.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from xml.parsers.expat import ParserCreate


class DefaultSaxHandler(object):
    def start_element(self, name, attrs):
        print('sax:start_element: %s, attrs: %s' % (name, str(attrs)))

    def end_element(self, name):
        print('sax:end_element: %s' % name)

    def char_data(self, text):
        print('sax:char_data: %s' % text)

xml = r'''<?xml version="1.0"?>
<ol>
    <li><a href="/python">Python</a></li>
    <li><a href="/ruby">Ruby</a></li>
</ol>
'''

handler = DefaultSaxHandler()
parser = ParserCreate()
parser.StartElementHandler = handler.start_element<|fim▁hole|>
parser.Parse(xml)<|fim▁end|>
parser.EndElementHandler = handler.end_element
parser.CharacterDataHandler = handler.char_data
<|file_name|>application_test.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 Rohith All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package marathon import ( "fmt" "io/ioutil" "net/http" "net/url" "strings" "testing" "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestApplicationDependsOn(t *testing.T) { app := NewDockerApplication() app.DependsOn("fake-app") app.DependsOn("fake-app1", "fake-app2") assert.Equal(t, 3, len(app.Dependencies)) } func TestApplicationMemory(t *testing.T) { app := NewDockerApplication() app.Memory(50.0) assert.Equal(t, 50.0, *app.Mem) } func TestApplicationString(t *testing.T) { app := NewDockerApplication(). Name("my-app"). CPU(0.1). Memory(64). Storage(0.0). Count(2). AddArgs("/usr/sbin/apache2ctl", "-D", "FOREGROUND"). AddEnv("NAME", "frontend_http"). AddEnv("SERVICE_80_NAME", "test_http"). AddEnvSecret("SECRET1", "secret1", "/path/to/secret") app. Container.Docker.Container("quay.io/gambol99/apache-php:latest"). Bridged(). Expose(80). Expose(443) app, err := app.CheckHTTP("/health", 80, 5) assert.Nil(t, err) expectedAppJSONBytes, err := ioutil.ReadFile("tests/app-definitions/TestApplicationString-output.json") if err != nil { panic(err) } expectedAppJSON := strings.TrimSpace(string(expectedAppJSONBytes)) assert.Equal(t, expectedAppJSON, app.String()) } func TestApplicationCount(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Instances) app.Count(1) assert.Equal(t, 1, *app.Instances) } func TestApplicationStorage(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Disk) app.Storage(0.10) assert.Equal(t, 0.10, *app.Disk) } func TestApplicationAllTaskRunning(t *testing.T) { app := NewDockerApplication() app.Instances = nil app.Tasks = nil assert.True(t, app.AllTaskRunning()) var cnt int app.Instances = &cnt cnt = 0 assert.True(t, app.AllTaskRunning()) cnt = 1<|fim▁hole|> app.Tasks = []*Task{} app.TasksRunning = 1 assert.True(t, app.AllTaskRunning()) cnt = 2 app.TasksRunning = 1 assert.False(t, app.AllTaskRunning()) } func TestApplicationName(t *testing.T) { app := NewDockerApplication() assert.Equal(t, "", app.ID) app.Name(fakeAppName) assert.Equal(t, fakeAppName, app.ID) } func TestApplicationCommand(t *testing.T) { app := NewDockerApplication() assert.Equal(t, "", app.ID) app.Command("format C:") assert.Equal(t, "format C:", *app.Cmd) } func TestApplicationCPU(t *testing.T) { app := NewDockerApplication() assert.Equal(t, 0.0, app.CPUs) app.CPU(0.1) assert.Equal(t, 0.1, app.CPUs) } func TestApplicationSetGPUs(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.GPUs) app.SetGPUs(0.1) assert.Equal(t, 0.1, *app.GPUs) } func TestApplicationEmptyGPUs(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.GPUs) app.EmptyGPUs() assert.Equal(t, 0.0, *app.GPUs) } func TestApplicationArgs(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Args) app.AddArgs("-p").AddArgs("option", "-v") assert.Equal(t, 3, len(*app.Args)) assert.Equal(t, "-p", (*app.Args)[0]) assert.Equal(t, "option", 
(*app.Args)[1]) assert.Equal(t, "-v", (*app.Args)[2]) app.EmptyArgs() assert.NotNil(t, app.Args) assert.Equal(t, 0, len(*app.Args)) } func ExampleApplication_AddConstraint() { app := NewDockerApplication() // add two constraints app.AddConstraint("hostname", "UNIQUE"). AddConstraint("rack_id", "CLUSTER", "rack-1") } func TestApplicationConstraints(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Constraints) app.AddConstraint("hostname", "UNIQUE"). AddConstraint("rack_id", "CLUSTER", "rack-1") assert.Equal(t, 2, len(*app.Constraints)) assert.Equal(t, []string{"hostname", "UNIQUE"}, (*app.Constraints)[0]) assert.Equal(t, []string{"rack_id", "CLUSTER", "rack-1"}, (*app.Constraints)[1]) app.EmptyConstraints() assert.NotNil(t, app.Constraints) assert.Equal(t, 0, len(*app.Constraints)) } func TestApplicationLabels(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Labels) app.AddLabel("hello", "world").AddLabel("foo", "bar") assert.Equal(t, 2, len(*app.Labels)) assert.Equal(t, "world", (*app.Labels)["hello"]) assert.Equal(t, "bar", (*app.Labels)["foo"]) app.EmptyLabels() assert.NotNil(t, app.Labels) assert.Equal(t, 0, len(*app.Labels)) } func TestApplicationEnvs(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Env) app.AddEnv("hello", "world").AddEnv("foo", "bar").AddEnvSecret("top", "secret1", "path/to/my/secret") assert.Equal(t, 3, len(*app.Env)) assert.Equal(t, "world", (*app.Env)["hello"]) assert.Equal(t, "bar", (*app.Env)["foo"]) assert.Equal(t, map[string]string{"secret": "secret1"}, (*app.Env)["top"]) app.EmptyEnvs() assert.NotNil(t, app.Env) assert.Equal(t, 0, len(*app.Env)) } func TestApplicationSecrets(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Env) app.AddEnvSecret("top", "secret0", "path/to/my/secret") app.AddEnvSecret("top2", "secret1", "path/to/my/other/secret") assert.Equal(t, 2, len(*app.Secrets)) assert.Equal(t, Secret{Source: "path/to/my/secret"}, (*app.Secrets)["secret0"]) assert.Equal(t, Secret{Source: "path/to/my/other/secret"}, (*app.Secrets)["secret1"]) app.EmptySecrets() assert.NotNil(t, app.Secrets) assert.Equal(t, 0, len(*app.Secrets)) } func TestApplicationSetExecutor(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Executor) app.SetExecutor("executor") assert.Equal(t, "executor", *app.Executor) app.SetExecutor("") assert.Equal(t, "", *app.Executor) } func TestApplicationHealthChecks(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.HealthChecks) app.AddHealthCheck(HealthCheck{}.SetPath("/check1")). AddHealthCheck(HealthCheck{}.SetPath("/check2")) assert.Equal(t, 2, len(*app.HealthChecks)) assert.Equal(t, HealthCheck{}.SetPath("/check1"), (*app.HealthChecks)[0]) assert.Equal(t, HealthCheck{}.SetPath("/check2"), (*app.HealthChecks)[1]) app.EmptyHealthChecks() assert.NotNil(t, app.HealthChecks) assert.Equal(t, 0, len(*app.HealthChecks)) } func TestApplicationReadinessChecks(t *testing.T) { app := NewDockerApplication() require.Nil(t, app.HealthChecks) rc := ReadinessCheck{} rc.SetName("/readiness") app.AddReadinessCheck(rc) require.Equal(t, 1, len(*app.ReadinessChecks)) assert.Equal(t, "/readiness", *((*app.ReadinessChecks)[0].Name)) app.EmptyReadinessChecks() require.NotNil(t, app.ReadinessChecks) assert.Equal(t, 0, len(*app.ReadinessChecks)) } func TestApplicationPortDefinitions(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.PortDefinitions) app.AddPortDefinition(PortDefinition{Protocol: "tcp", Name: "es"}.SetPort(9201).AddLabel("foo", "bar")). 
AddPortDefinition(PortDefinition{Protocol: "udp,tcp", Name: "syslog"}.SetPort(514)) assert.Equal(t, 2, len(*app.PortDefinitions)) assert.Equal(t, PortDefinition{Protocol: "tcp", Name: "es"}.SetPort(9201).AddLabel("foo", "bar"), (*app.PortDefinitions)[0]) assert.Equal(t, 1, len(*(*app.PortDefinitions)[0].Labels)) assert.Equal(t, PortDefinition{Protocol: "udp,tcp", Name: "syslog"}.SetPort(514), (*app.PortDefinitions)[1]) assert.Nil(t, (*app.PortDefinitions)[1].Labels) (*app.PortDefinitions)[0].EmptyLabels() assert.NotNil(t, (*app.PortDefinitions)[0].Labels) assert.Equal(t, 0, len(*(*app.PortDefinitions)[0].Labels)) app.EmptyPortDefinitions() assert.NotNil(t, app.PortDefinitions) assert.Equal(t, 0, len(*app.PortDefinitions)) } func TestHasHealthChecks(t *testing.T) { app := NewDockerApplication() assert.False(t, app.HasHealthChecks()) app.Container.Docker.Container("quay.io/gambol99/apache-php:latest").Expose(80) _, err := app.CheckTCP(80, 10) assert.NoError(t, err) assert.True(t, app.HasHealthChecks()) } func TestApplicationCheckTCP(t *testing.T) { app := NewDockerApplication() assert.False(t, app.HasHealthChecks()) _, err := app.CheckTCP(80, 10) assert.Error(t, err) assert.False(t, app.HasHealthChecks()) app.Container.Docker.Container("quay.io/gambol99/apache-php:latest").Expose(80) _, err = app.CheckTCP(80, 10) assert.NoError(t, err) assert.True(t, app.HasHealthChecks()) check := (*app.HealthChecks)[0] assert.Equal(t, "TCP", check.Protocol) assert.Equal(t, 10, check.IntervalSeconds) assert.Equal(t, 0, *check.PortIndex) } func TestApplicationCheckHTTP(t *testing.T) { app := NewDockerApplication() assert.False(t, app.HasHealthChecks()) _, err := app.CheckHTTP("/", 80, 10) assert.Error(t, err) assert.False(t, app.HasHealthChecks()) app.Container.Docker.Container("quay.io/gambol99/apache-php:latest").Expose(80) _, err = app.CheckHTTP("/health", 80, 10) assert.NoError(t, err) assert.True(t, app.HasHealthChecks()) check := (*app.HealthChecks)[0] assert.Equal(t, "HTTP", check.Protocol) assert.Equal(t, 10, check.IntervalSeconds) assert.Equal(t, "/health", *check.Path) assert.Equal(t, 0, *check.PortIndex) } func TestCreateApplication(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() application := NewDockerApplication() application.Name(fakeAppName) app, err := endpoint.Client.CreateApplication(application) assert.NoError(t, err) assert.NotNil(t, app) assert.Equal(t, application.ID, fakeAppName) assert.Equal(t, app.Deployments[0]["id"], "f44fd4fc-4330-4600-a68b-99c7bd33014a") } func TestUpdateApplication(t *testing.T) { for _, force := range []bool{false, true} { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() application := NewDockerApplication() application.Name(fakeAppName) id, err := endpoint.Client.UpdateApplication(application, force) assert.NoError(t, err) assert.Equal(t, id.DeploymentID, "83b215a6-4e26-4e44-9333-5c385eda6438") assert.Equal(t, id.Version, "2014-08-26T07:37:50.462Z") } } func TestApplications(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() applications, err := endpoint.Client.Applications(nil) assert.NoError(t, err) assert.NotNil(t, applications) assert.Equal(t, len(applications.Apps), 2) assert.Equal(t, (*applications.Apps[0].Env)["SECRET1"].(map[string]interface{})["secret"].(string), "secret0") assert.Equal(t, (*applications.Apps[0].Secrets)["secret0"].Source, "secret/definition/id") v := url.Values{} v.Set("cmd", "nginx") applications, err = endpoint.Client.Applications(v) 
assert.NoError(t, err) assert.NotNil(t, applications) assert.Equal(t, len(applications.Apps), 1) } func TestApplicationsEmbedTaskStats(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() v := url.Values{} v.Set("embed", "apps.taskStats") applications, err := endpoint.Client.Applications(v) assert.NoError(t, err) assert.NotNil(t, applications) assert.Equal(t, len(applications.Apps), 1) assert.NotNil(t, applications.Apps[0].TaskStats) assert.Equal(t, applications.Apps[0].TaskStats["startedAfterLastScaling"].Stats.Counts["healthy"], 1) assert.Equal(t, applications.Apps[0].TaskStats["startedAfterLastScaling"].Stats.LifeTime["averageSeconds"], 17024.575) } func TestApplicationsEmbedReadiness(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() v := url.Values{} v.Set("embed", "apps.readiness") applications, err := endpoint.Client.Applications(v) require.NoError(t, err) require.NotNil(t, applications) require.Equal(t, len(applications.Apps), 1) require.NotNil(t, applications.Apps[0].ReadinessCheckResults) require.True(t, len(*applications.Apps[0].ReadinessCheckResults) > 0) actualRes := (*applications.Apps[0].ReadinessCheckResults)[0] expectedRes := ReadinessCheckResult{ Name: "myReadyCheck", TaskID: "test_frontend_app1.c9de6033", Ready: false, LastResponse: ReadinessLastResponse{ Body: "{}", ContentType: "application/json", Status: 500, }, } assert.Equal(t, expectedRes, actualRes) } func TestListApplications(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() applications, err := endpoint.Client.ListApplications(nil) assert.NoError(t, err) assert.NotNil(t, applications) assert.Equal(t, len(applications), 2) assert.Equal(t, applications[0], fakeAppName) assert.Equal(t, applications[1], fakeAppNameBroken) } func TestApplicationVersions(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() versions, err := endpoint.Client.ApplicationVersions(fakeAppName) assert.NoError(t, err) assert.NotNil(t, versions) assert.NotNil(t, versions.Versions) assert.Equal(t, len(versions.Versions), 1) assert.Equal(t, versions.Versions[0], "2014-04-04T06:25:31.399Z") /* check we get an error on app not there */ versions, err = endpoint.Client.ApplicationVersions("/not/there") assert.Error(t, err) } func TestRestartApplication(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() id, err := endpoint.Client.RestartApplication(fakeAppName, false) assert.NoError(t, err) assert.NotNil(t, id) assert.Equal(t, "83b215a6-4e26-4e44-9333-5c385eda6438", id.DeploymentID) assert.Equal(t, "2014-08-26T07:37:50.462Z", id.Version) id, err = endpoint.Client.RestartApplication("/not/there", false) assert.Error(t, err) assert.Nil(t, id) } func TestApplicationUris(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Uris) app.AddUris("file://uri1.tar.gz").AddUris("file://uri2.tar.gz", "file://uri3.tar.gz") assert.Equal(t, 3, len(*app.Uris)) assert.Equal(t, "file://uri1.tar.gz", (*app.Uris)[0]) assert.Equal(t, "file://uri2.tar.gz", (*app.Uris)[1]) assert.Equal(t, "file://uri3.tar.gz", (*app.Uris)[2]) app.EmptyUris() assert.NotNil(t, app.Uris) assert.Equal(t, 0, len(*app.Uris)) } func TestApplicationFetchURIs(t *testing.T) { app := NewDockerApplication() assert.Nil(t, app.Fetch) app.AddFetchURIs(Fetch{URI: "file://uri1.tar.gz"}). 
AddFetchURIs(Fetch{URI: "file://uri2.tar.gz"}, Fetch{URI: "file://uri3.tar.gz"}) assert.Equal(t, 3, len(*app.Fetch)) assert.Equal(t, Fetch{URI: "file://uri1.tar.gz"}, (*app.Fetch)[0]) assert.Equal(t, Fetch{URI: "file://uri2.tar.gz"}, (*app.Fetch)[1]) assert.Equal(t, Fetch{URI: "file://uri3.tar.gz"}, (*app.Fetch)[2]) app.EmptyUris() assert.NotNil(t, app.Uris) assert.Equal(t, 0, len(*app.Uris)) } func TestSetApplicationVersion(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() deployment, err := endpoint.Client.SetApplicationVersion(fakeAppName, &ApplicationVersion{Version: "2014-08-26T07:37:50.462Z"}) assert.NoError(t, err) assert.NotNil(t, deployment) assert.NotNil(t, deployment.Version) assert.NotNil(t, deployment.DeploymentID) assert.Equal(t, deployment.Version, "2014-08-26T07:37:50.462Z") assert.Equal(t, deployment.DeploymentID, "83b215a6-4e26-4e44-9333-5c385eda6438") _, err = endpoint.Client.SetApplicationVersion("/not/there", &ApplicationVersion{Version: "2014-04-04T06:25:31.399Z"}) assert.Error(t, err) } func TestHasApplicationVersion(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() found, err := endpoint.Client.HasApplicationVersion(fakeAppName, "2014-04-04T06:25:31.399Z") assert.NoError(t, err) assert.True(t, found) found, err = endpoint.Client.HasApplicationVersion(fakeAppName, "###2015-04-04T06:25:31.399Z") assert.NoError(t, err) assert.False(t, found) } func TestDeleteApplication(t *testing.T) { for _, force := range []bool{false, true} { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() id, err := endpoint.Client.DeleteApplication(fakeAppName, force) assert.NoError(t, err) assert.NotNil(t, id) assert.Equal(t, "83b215a6-4e26-4e44-9333-5c385eda6438", id.DeploymentID) assert.Equal(t, "2014-08-26T07:37:50.462Z", id.Version) id, err = endpoint.Client.DeleteApplication("no_such_app", force) assert.Error(t, err) } } func TestApplicationOK(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() ok, err := endpoint.Client.ApplicationOK(fakeAppName) assert.NoError(t, err) assert.True(t, ok) ok, err = endpoint.Client.ApplicationOK(fakeAppNameBroken) assert.NoError(t, err) assert.False(t, ok) ok, err = endpoint.Client.ApplicationOK(fakeAppNameUnhealthy) assert.NoError(t, err) assert.False(t, ok) } func verifyApplication(application *Application, t *testing.T) { assert.NotNil(t, application) assert.Equal(t, application.ID, fakeAppName) assert.NotNil(t, application.HealthChecks) assert.NotNil(t, application.Tasks) assert.Equal(t, len(*application.HealthChecks), 1) assert.Equal(t, len(application.Tasks), 2) } func TestApplication(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() application, err := endpoint.Client.Application(fakeAppName) assert.NoError(t, err) verifyApplication(application, t) _, err = endpoint.Client.Application("no_such_app") assert.Error(t, err) apiErr, ok := err.(*APIError) assert.True(t, ok) assert.Equal(t, ErrCodeNotFound, apiErr.ErrCode) config := NewDefaultConfig() config.URL = "http://non-existing-marathon-host.local:5555" // Reduce timeout to speed up test execution time. 
config.HTTPClient = &http.Client{ Timeout: 100 * time.Millisecond, } endpoint = newFakeMarathonEndpoint(t, &configContainer{ client: &config, }) defer endpoint.Close() _, err = endpoint.Client.Application(fakeAppName) assert.Error(t, err) _, ok = err.(*APIError) assert.False(t, ok) } func TestApplicationConfiguration(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() application, err := endpoint.Client.ApplicationByVersion(fakeAppName, "2014-09-12T23:28:21.737Z") assert.NoError(t, err) verifyApplication(application, t) _, err = endpoint.Client.ApplicationByVersion(fakeAppName, "no_such_version") assert.Error(t, err) apiErr, ok := err.(*APIError) assert.True(t, ok) assert.Equal(t, ErrCodeNotFound, apiErr.ErrCode) _, err = endpoint.Client.ApplicationByVersion("no_such_app", "latest") assert.Error(t, err) apiErr, ok = err.(*APIError) assert.True(t, ok) assert.Equal(t, ErrCodeNotFound, apiErr.ErrCode) } func TestWaitOnApplication(t *testing.T) { waitTime := 100 * time.Millisecond tests := []struct { desc string timeout time.Duration appName string testScope string shouldSucceed bool }{ { desc: "initially existing app", timeout: 0, appName: fakeAppName, shouldSucceed: true, }, { desc: "delayed existing app | timeout > ticker", timeout: 200 * time.Millisecond, appName: fakeAppName, testScope: "wait-on-app", shouldSucceed: true, }, { desc: "delayed existing app | timeout < ticker", timeout: 50 * time.Millisecond, appName: fakeAppName, testScope: "wait-on-app", shouldSucceed: false, }, { desc: "missing app | timeout > ticker", timeout: 200 * time.Millisecond, appName: "no_such_app", shouldSucceed: false, }, { desc: "missing app | timeout < ticker", timeout: 50 * time.Millisecond, appName: "no_such_app", shouldSucceed: false, }, } for _, test := range tests { defaultConfig := NewDefaultConfig() defaultConfig.PollingWaitTime = waitTime configs := &configContainer{ client: &defaultConfig, server: &serverConfig{ scope: test.testScope, }, } endpoint := newFakeMarathonEndpoint(t, configs) defer endpoint.Close() errCh := make(chan error) go func() { errCh <- endpoint.Client.WaitOnApplication(test.appName, test.timeout) }() select { case <-time.After(400 * time.Millisecond): assert.Fail(t, fmt.Sprintf("%s: WaitOnApplication did not complete in time", test.desc)) case err := <-errCh: if test.shouldSucceed { assert.NoError(t, err, test.desc) } else { assert.IsType(t, err, ErrTimeoutError, test.desc) } } } } func TestAppExistAndRunning(t *testing.T) { endpoint := newFakeMarathonEndpoint(t, nil) defer endpoint.Close() client := endpoint.Client.(*marathonClient) assert.True(t, client.appExistAndRunning(fakeAppName)) assert.False(t, client.appExistAndRunning("no_such_app")) } func TestSetIPPerTask(t *testing.T) { app := Application{} app.Ports = append(app.Ports, 10) app.AddPortDefinition(PortDefinition{}) assert.Nil(t, app.IPAddressPerTask) assert.Equal(t, 1, len(app.Ports)) assert.Equal(t, 1, len(*app.PortDefinitions)) app.SetIPAddressPerTask(IPAddressPerTask{}) assert.NotNil(t, app.IPAddressPerTask) assert.Equal(t, 0, len(app.Ports)) assert.Equal(t, 0, len(*app.PortDefinitions)) } func TestIPAddressPerTask(t *testing.T) { ipPerTask := IPAddressPerTask{} assert.Nil(t, ipPerTask.Groups) assert.Nil(t, ipPerTask.Labels) assert.Nil(t, ipPerTask.Discovery) ipPerTask. AddGroup("label"). AddLabel("key", "value"). 
SetDiscovery(Discovery{}) assert.Equal(t, 1, len(*ipPerTask.Groups)) assert.Equal(t, "label", (*ipPerTask.Groups)[0]) assert.Equal(t, "value", (*ipPerTask.Labels)["key"]) assert.NotEmpty(t, ipPerTask.Discovery) ipPerTask.EmptyGroups() assert.Equal(t, 0, len(*ipPerTask.Groups)) ipPerTask.EmptyLabels() assert.Equal(t, 0, len(*ipPerTask.Labels)) } func TestIPAddressPerTaskDiscovery(t *testing.T) { disc := Discovery{} assert.Nil(t, disc.Ports) disc.AddPort(Port{}) assert.NotNil(t, disc.Ports) assert.Equal(t, 1, len(*disc.Ports)) disc.EmptyPorts() assert.NotNil(t, disc.Ports) assert.Equal(t, 0, len(*disc.Ports)) } func TestUpgradeStrategy(t *testing.T) { app := Application{} assert.Nil(t, app.UpgradeStrategy) app.SetUpgradeStrategy(UpgradeStrategy{}.SetMinimumHealthCapacity(1.0).SetMaximumOverCapacity(0.0)) us := app.UpgradeStrategy assert.Equal(t, 1.0, *us.MinimumHealthCapacity) assert.Equal(t, 0.0, *us.MaximumOverCapacity) app.EmptyUpgradeStrategy() us = app.UpgradeStrategy assert.NotNil(t, us) assert.Nil(t, us.MinimumHealthCapacity) assert.Nil(t, us.MaximumOverCapacity) }<|fim▁end|>
assert.False(t, app.AllTaskRunning())
<|file_name|>partners_plugin.py<|end_file_name|><|fim▁begin|>from cms.models.pluginmodel import CMSPlugin
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import gettext_lazy as _
from django.utils.translation import get_language

from partners.models import Partner<|fim▁hole|>
    name = _("Partners")
    model = CMSPlugin
    render_template = "partners/partners_plugin.html"
    text_enabled = False
    allow_children = False

    def render(self, context, instance, placeholder):
        language = get_language()
        if language is None:
            language = 'en'
        partners = Partner.objects.filter(active=True).translated(language).order_by('translations__name').all()
        context.update({
            'partners': partners,
        })
        return context


plugin_pool.register_plugin(PartnersPlugin)<|fim▁end|>
class PartnersPlugin(CMSPluginBase):
<|file_name|>SerializeWriter.js<|end_file_name|><|fim▁begin|>/* * (c) Copyright Ascensio System SIA 2010-2014 * * This program is a free software product. You can redistribute it and/or * modify it under the terms of the GNU Affero General Public License (AGPL) * version 3 as published by the Free Software Foundation. In accordance with * Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect * that Ascensio System SIA expressly excludes the warranty of non-infringement * of any third-party rights. * * This program is distributed WITHOUT ANY WARRANTY; without even the implied * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For * details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html * * You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia, * EU, LV-1021. * * The interactive user interfaces in modified source and object code versions * of the Program must display Appropriate Legal Notices, as required under * Section 5 of the GNU AGPL version 3. * * Pursuant to Section 7(b) of the License you must retain the original Product * logo when distributing the program. Pursuant to Section 7(e) we decline to * grant you any rights under trademark law for use of our trademarks. * * All the Product's GUI elements, including illustrations and icon sets, as * well as technical writing content are licensed under the terms of the * Creative Commons Attribution-ShareAlike 4.0 International. See the License * terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode * */ var c_oMainTables = { Main: 255, App: 1, Core: 2, Presentation: 3, ViewProps: 4, VmlDrawing: 5, TableStyles: 6, Themes: 20, ThemeOverride: 21, SlideMasters: 22, SlideLayouts: 23, Slides: 24, NotesMasters: 25, NotesSlides: 26, HandoutMasters: 30, SlideRels: 40, ThemeRels: 41, ImageMap: 42, FontMap: 43, FontsEmbedded: 44 }; function CSeekTableEntry() { this.Type = 0; this.SeekPos = 0; } function GUID() { var S4 = function () { var ret = (((1 + Math.random()) * 65536) | 0).toString(16).substring(1); ret = ret.toUpperCase(); return ret; }; return (S4() + S4() + "-" + S4() + "-" + S4() + "-" + S4() + "-" + S4() + S4() + S4()); } function CBinaryFileWriter() { this.tableStylesGuides = new Array(); this.Init = function () { var _canvas = document.createElement("canvas"); var _ctx = _canvas.getContext("2d"); this.len = 1024 * 1024 * 5; this.ImData = _ctx.createImageData(this.len / 4, 1); this.data = this.ImData.data; this.pos = 0; delete _canvas; }; this.IsWordWriter = false; this.ImData = null; this.data = null; this.len = 0; this.pos = 0; this.Init(); this.UseContinueWriter = false; this.IsUseFullUrl = false; this.DocumentOrigin = ""; this.PresentationThemesOrigin = ""; var oThis = this; this.Start_UseFullUrl = function (origin) { this.IsUseFullUrl = true; this.DocumentOrigin = origin; }; this.Start_UseDocumentOrigin = function (origin) { this.PresentationThemesOrigin = origin + "/presentationthemes/"; }; this.End_UseFullUrl = function () { this.IsUseFullUrl = false; }; this.Copy = function (oMemory, nPos, nLen) { for (var Index = 0; Index < nLen; Index++) { this.CheckSize(1); this.data[this.pos++] = oMemory.data[Index + nPos]; } }; this.CheckSize = function (count) { if (this.pos + count >= this.len) { var _canvas = document.createElement("canvas"); var _ctx = _canvas.getContext("2d"); var oldImData = this.ImData; var oldData = this.data; var oldPos = this.pos; this.len *= 2; this.ImData = _ctx.createImageData(this.len / 4, 1); this.data = this.ImData.data; var 
newData = this.data; for (var i = 0; i < this.pos; i++) { newData[i] = oldData[i]; } delete _canvas; } }; this.GetBase64Memory = function () { return Base64Encode(this.data, this.pos, 0); }; this.GetBase64Memory2 = function (nPos, nLen) { return Base64Encode(this.data, nLen, nPos); }; this.GetCurPosition = function () { return this.pos; }; this.Seek = function (nPos) { this.pos = nPos; }; this.Skip = function (nDif) { this.pos += nDif; }; this.WriteBool = function (val) { this.CheckSize(1); if (false == val) { this.data[this.pos++] = 0; } else { this.data[this.pos++] = 1; } }; this.WriteUChar = function (val) { this.CheckSize(1); this.data[this.pos++] = val; }; this.WriteUShort = function (val) { this.CheckSize(2); this.data[this.pos++] = (val) & 255; this.data[this.pos++] = (val >>> 8) & 255; }; this.WriteULong = function (val) { this.CheckSize(4); this.data[this.pos++] = (val) & 255; this.data[this.pos++] = (val >>> 8) & 255; this.data[this.pos++] = (val >>> 16) & 255; this.data[this.pos++] = (val >>> 24) & 255; }; this.WriteDouble = function (val) { this.WriteULong((val * 100000) >> 0); }; this.WriteString = function (text) { var count = text.length & 65535; this.WriteULong(count); this.CheckSize(count); for (var i = 0; i < count; i++) { var c = text.charCodeAt(i) & 255; this.data[this.pos++] = c; } }; this.WriteString2 = function (text) { var count = text.length & 2147483647; var countWrite = 2 * count; this.WriteULong(count); this.CheckSize(countWrite); for (var i = 0; i < count; i++) { var c = text.charCodeAt(i) & 65535; this.data[this.pos++] = c & 255; this.data[this.pos++] = (c >>> 8) & 255; } }; this.WriteBuffer = function (data, _pos, count) { this.CheckSize(count); for (var i = 0; i < count; i++) { this.data[this.pos++] = data[_pos + i]; } }; this.m_arStack = new Array(); this.m_lStackPosition = 0; this.m_arMainTables = new Array(); this.StartRecord = function (lType) { this.m_arStack[this.m_lStackPosition] = this.pos + 5; this.m_lStackPosition++; this.WriteUChar(lType); this.WriteULong(0); }; this.EndRecord = function () { this.m_lStackPosition--; var _seek = this.pos; this.pos = this.m_arStack[this.m_lStackPosition] - 4; this.WriteULong(_seek - this.m_arStack[this.m_lStackPosition]); this.pos = _seek; }; this.StartMainRecord = function (lType) { var oEntry = new CSeekTableEntry(); oEntry.Type = lType; oEntry.SeekPos = this.pos; this.m_arMainTables[this.m_arMainTables.length] = oEntry; }; this.WriteReserved = function (lCount) { this.CheckSize(lCount); var _d = this.data; var _p = this.pos; var _e = this.pos + lCount; while (_p < _e) { _d[_p++] = 0; } this.pos += lCount; }; this.WriteMainPart = function () { var _pos = this.pos; this.pos = 0; var _count = this.m_arMainTables.length; for (var i = 0; i < _count; i++) { this.WriteUChar(this.m_arMainTables[i].Type); this.WriteULong(this.m_arMainTables[i].SeekPos); } this.pos = _pos; }; this._WriteString1 = function (type, val) { this.WriteUChar(type); this.WriteString2(val); }; this._WriteString2 = function (type, val) { if (val != null) { this._WriteString1(type, val); } }; this._WriteUChar1 = function (type, val) { this.WriteUChar(type); this.WriteUChar(val); }; this._WriteUChar2 = function (type, val) { if (val != null) { this._WriteUChar1(type, val); } }; this._WriteBool1 = function (type, val) { this.WriteUChar(type); this.WriteBool(val); }; this._WriteBool2 = function (type, val) { if (val != null) { this._WriteBool1(type, val); } }; this._WriteInt1 = function (type, val) { this.WriteUChar(type); this.WriteULong(val); }; 
this._WriteInt2 = function (type, val) { if (val != null) { this._WriteInt1(type, val); } }; this._WriteInt3 = function (type, val, scale) { this._WriteInt1(type, val * scale); }; this._WriteInt4 = function (type, val, scale) { if (val != null) { this._WriteInt1(type, (val * scale) >> 0); } }; this._WriteDouble1 = function (type, val) { var _val = val * 10000; this._WriteInt1(type, _val); }; this._WriteDouble2 = function (type, val) { if (val != null) { this._WriteDouble1(type, val); } }; this._WriteLimit1 = this._WriteUChar1; this._WriteLimit2 = this._WriteUChar2; this.WriteRecord1 = function (type, val, func_write) { this.StartRecord(type); func_write(val); this.EndRecord(); }; this.WriteRecord2 = function (type, val, func_write) { if (null != val) { this.StartRecord(type); func_write(val); this.EndRecord(); } }; this.WriteRecord3 = function (type, val, func_write) { if (null != val) { var _start_pos = this.pos; this.StartRecord(type); func_write(val); this.EndRecord(); if ((_start_pos + 5) == this.pos) { this.pos -= 5; return false; } return true; } return false; }; this.WriteRecordArray = function (type, subtype, val_array, func_element_write) { this.StartRecord(type); var len = val_array.length; this.WriteULong(len); for (var i = 0; i < len; i++) { this.WriteRecord1(subtype, val_array[i], func_element_write); } this.EndRecord(); }; this.font_map = {}; this.image_map = {}; this.WriteDocument = function (presentation) { this.font_map = {}; this.image_map = {}; this.WriteReserved(5 * 30); this.StartMainRecord(c_oMainTables.Main); this.WriteULong(1347441753); this.WriteULong(0); if (presentation.App) { this.WriteApp(presentation.App); } if (presentation.Core) { this.WriteCore(presentation.Core); } if (presentation.ViewProps) { this.WriteViewProps(presentation.ViewProps); } this.WritePresentation(presentation); var _dst_themes = []; var _dst_masters = []; var _dst_layouts = []; var _dst_slides = []; var _dst_notes = []; var _dst_notesMasters = []; var _slides_rels = []; var _master_rels = []; var _slides = presentation.Slides; var _slide_count = _slides.length; for (var i = 0; i < _slide_count; i++) { _dst_slides[i] = _slides[i]; var _m = _slides[i].Layout.Master; var is_found = false; var _len_dst = _dst_masters.length; for (var j = 0; j < _len_dst; j++) { if (_dst_masters[j] == _m) { is_found = true; break; } } if (!is_found) { _dst_masters[_len_dst] = _m; var _m_rels = { ThemeIndex: 0, Layouts: new Array() }; var _lay_c = _m.sldLayoutLst.length; var _ind_l = _dst_layouts.length; for (var k = 0; k < _lay_c; k++) { _dst_layouts[_ind_l] = _m.sldLayoutLst[k]; _m_rels.Layouts[k] = _ind_l; _ind_l++; } _master_rels[_len_dst] = _m_rels; } var _layoutsC = _dst_layouts.length; for (var ii = 0; ii < _layoutsC; ii++) { if (_dst_layouts[ii] == _dst_slides[i].Layout) { _slides_rels[i] = ii; } } } var _dst_masters_len = _dst_masters.length; for (var i = 0; i < _dst_masters_len; i++) { var _t = _dst_masters[i].Theme; var is_found = false; var _len_dst = _dst_themes.length; for (var j = 0; j < _len_dst; j++) { if (_dst_themes[j] == _t) { is_found = true; break; } } if (!is_found) { _dst_themes[_len_dst] = _t; _master_rels[i].ThemeIndex = _len_dst; } } var _count_table_styles = presentation.globalTableStyles.length; if (0 < _count_table_styles) { for (var i = 0; i < _count_table_styles; i++) { this.tableStylesGuides[i] = "{" + GUID() + "}"; } this.StartMainRecord(c_oMainTables.TableStyles); this.StartRecord(c_oMainTables.SlideRels); this.WriteUChar(g_nodeAttributeStart); this._WriteString1(0, 
this.tableStylesGuides[0]); this.WriteUChar(g_nodeAttributeEnd); this.StartRecord(0); for (var i = 0; i < _count_table_styles; i++) { this.WriteTableStyle(i, presentation.globalTableStyles[i]); } this.EndRecord(); this.EndRecord(); } this.StartMainRecord(c_oMainTables.SlideRels); this.StartRecord(c_oMainTables.SlideRels); this.WriteUChar(g_nodeAttributeStart); for (var i = 0; i < _slide_count; i++) { this._WriteInt1(i, _slides_rels[i]); } this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); this.StartMainRecord(c_oMainTables.ThemeRels); this.StartRecord(c_oMainTables.ThemeRels); var _master_count = _dst_masters.length; this.WriteULong(_master_count); for (var i = 0; i < _master_count; i++) { this.StartRecord(0); this.WriteUChar(g_nodeAttributeStart); this._WriteInt1(0, _master_rels[i].ThemeIndex); this.WriteUChar(1); this.WriteString(_dst_masters[i].ImageBase64); this.WriteUChar(g_nodeAttributeEnd); var _lay_c = _master_rels[i].Layouts.length; this.WriteULong(_lay_c); for (var j = 0; j < _lay_c; j++) { this.StartRecord(0); this.WriteUChar(g_nodeAttributeStart); var _indL = _master_rels[i].Layouts[j]; this._WriteInt1(0, _indL); this.WriteUChar(1); this.WriteString(_dst_layouts[_indL].ImageBase64); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); } this.EndRecord(); } this.EndRecord(); var _count_arr = 0; _count_arr = _dst_themes.length; this.StartMainRecord(c_oMainTables.Themes); this.WriteULong(_count_arr); for (var i = 0; i < _count_arr; i++) { this.WriteTheme(_dst_themes[i]); } _count_arr = _dst_masters.length; this.StartMainRecord(c_oMainTables.SlideMasters); this.WriteULong(_count_arr); for (var i = 0; i < _count_arr; i++) { this.WriteSlideMaster(_dst_masters[i]); } _count_arr = _dst_layouts.length; this.StartMainRecord(c_oMainTables.SlideLayouts); this.WriteULong(_count_arr); for (var i = 0; i < _count_arr; i++) { this.WriteSlideLayout(_dst_layouts[i]); } _count_arr = _dst_slides.length; this.StartMainRecord(c_oMainTables.Slides); this.WriteULong(_count_arr); for (var i = 0; i < _count_arr; i++) { this.WriteSlide(_dst_slides[i]); } _count_arr = _dst_notes.length; this.StartMainRecord(c_oMainTables.NotesSlides); this.WriteULong(_count_arr); for (var i = 0; i < _count_arr; i++) { this.WriteSlideNote(_dst_notes[i]); } _count_arr = _dst_notesMasters.length; this.StartMainRecord(c_oMainTables.NotesMasters); this.WriteULong(_count_arr); for (var i = 0; i < _count_arr; i++) { this.WriteNoteMaster(_dst_notesMasters[i]); } this.StartMainRecord(c_oMainTables.FontMap); this.StartRecord(c_oMainTables.FontMap); this.WriteUChar(g_nodeAttributeStart); var _index_attr = 0; for (var i in this.font_map) { this.WriteUChar(_index_attr++); this.WriteString2(i); } this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); this.StartMainRecord(c_oMainTables.ImageMap); this.StartRecord(c_oMainTables.ImageMap); this.WriteUChar(g_nodeAttributeStart); _index_attr = 0; for (var i in this.image_map) { this.WriteUChar(_index_attr++); this.WriteString2(i); } this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); this.WriteMainPart(); var ret = "PPTY;v1;" + this.pos + ";"; return ret + this.GetBase64Memory(); }; this.WriteApp = function (app) { this.StartMainRecord(c_oMainTables.App); this.StartRecord(c_oMainTables.App); this.WriteUChar(g_nodeAttributeStart); this._WriteString2(0, app.Template); this._WriteString2(1, app.Application); this._WriteString2(2, app.PresentationFormat); this._WriteString2(3, app.Company); this._WriteString2(4, app.AppVersion); this._WriteInt2(5, app.TotalTime); this._WriteInt2(6, 
app.Words); this._WriteInt2(7, app.Paragraphs); this._WriteInt2(8, app.Slides); this._WriteInt2(9, app.Notes); this._WriteInt2(10, app.HiddenSlides); this._WriteInt2(11, app.MMClips); this._WriteBool2(12, app.ScaleCrop); this._WriteBool2(13, app.LinksUpToDate); this._WriteBool2(14, app.SharedDoc); this._WriteBool2(15, app.HyperlinksChanged); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); }; this.WriteCore = function (core) { this.StartMainRecord(c_oMainTables.Core); this.StartRecord(c_oMainTables.Core); this.WriteUChar(g_nodeAttributeStart); this._WriteString2(0, core.title); this._WriteString2(1, core.creator); this._WriteString2(2, core.lastModifiedBy); this._WriteString2(3, core.revision); this._WriteString2(4, core.created); this._WriteString2(5, core.modified); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); }; this.WriteViewProps = function (viewprops) { this.StartMainRecord(c_oMainTables.ViewProps); this.StartRecord(c_oMainTables.ViewProps); this.EndRecord(); }; this.WritePresentation = function (presentation) { var pres = presentation.pres; this.StartMainRecord(c_oMainTables.Presentation); this.StartRecord(c_oMainTables.Presentation); this.WriteUChar(g_nodeAttributeStart); this._WriteBool2(0, pres.attrAutoCompressPictures); this._WriteInt2(1, pres.attrBookmarkIdSeed); this._WriteBool2(2, pres.attrCompatMode); this._WriteLimit2(3, pres.attrConformance); this._WriteBool2(4, pres.attrEmbedTrueTypeFonts); this._WriteInt2(5, pres.attrFirstSlideNum); this._WriteBool2(6, pres.attrRemovePersonalInfoOnSave); this._WriteBool2(7, pres.attrRtl); this._WriteBool2(8, pres.attrSaveSubsetFonts); this._WriteString2(9, pres.attrServerZoom); this._WriteBool2(10, pres.attrShowSpecialPlsOnTitleSld); this._WriteBool2(11, pres.attrStrictFirstAndLastChars); this.WriteUChar(g_nodeAttributeEnd); this.WriteRecord2(0, presentation.defaultTextStyle, this.WriteTextListStyle); pres.SldSz.cx = (presentation.Width * c_dScalePPTXSizes) >> 0; pres.SldSz.cy = (presentation.Height * c_dScalePPTXSizes) >> 0; this.StartRecord(5); this.WriteUChar(g_nodeAttributeStart); this._WriteInt1(0, pres.SldSz.cx); this._WriteInt1(1, pres.SldSz.cy); this._WriteLimit2(2, pres.SldSz.type); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); pres.NotesSz = new Object(); pres.NotesSz.cx = (presentation.Height * c_dScalePPTXSizes) >> 0; pres.NotesSz.cy = (presentation.Width * c_dScalePPTXSizes) >> 0; this.StartRecord(3); this.WriteUChar(g_nodeAttributeStart); this._WriteInt1(0, pres.NotesSz.cx); this._WriteInt1(1, pres.NotesSz.cy); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); if (!this.IsUseFullUrl) { var _countAuthors = 0; for (var i in presentation.CommentAuthors) { ++_countAuthors; } if (_countAuthors > 0) { this.StartRecord(6); this.StartRecord(0); this.WriteULong(_countAuthors); for (var i in presentation.CommentAuthors) { var _author = presentation.CommentAuthors[i]; this.StartRecord(0); this.WriteUChar(g_nodeAttributeStart); this._WriteInt1(0, _author.Id); this._WriteInt1(1, _author.LastId); this._WriteInt1(2, _author.Id - 1); this._WriteString1(3, _author.Name); this._WriteString1(4, _author.Initials); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); } this.EndRecord(); this.EndRecord(); } } this.EndRecord(); }; this.WriteTheme = function (_theme) { this.StartRecord(c_oMainTables.Themes); this.WriteUChar(g_nodeAttributeStart); this._WriteString2(0, _theme.name); this.WriteUChar(g_nodeAttributeEnd); this.WriteRecord1(0, _theme.themeElements, this.WriteThemeElements); this.WriteRecord2(1, 
_theme.spDef, this.WriteDefaultShapeDefinition); this.WriteRecord2(2, _theme.lnDef, this.WriteDefaultShapeDefinition); this.WriteRecord2(3, _theme.txDef, this.WriteDefaultShapeDefinition); this.WriteRecordArray(4, 0, _theme.extraClrSchemeLst, this.WriteExtraClrScheme); this.EndRecord(); }; this.WriteSlideMaster = function (_master) { this.StartRecord(c_oMainTables.SlideMasters); this.WriteUChar(g_nodeAttributeStart); this._WriteBool2(0, _master.preserve); this.WriteUChar(g_nodeAttributeEnd); this.WriteRecord1(0, _master.cSld, this.WriteCSld); this.WriteRecord1(1, _master.clrMap, this.WriteClrMap); this.WriteRecord2(5, _master.hf, this.WriteHF); this.WriteRecord2(6, _master.txStyles, this.WriteTxStyles); this.EndRecord(); }; this.WriteSlideLayout = function (_layout) { this.StartRecord(c_oMainTables.SlideLayouts); this.WriteUChar(g_nodeAttributeStart); this._WriteString2(0, _layout.matchingName); this._WriteBool2(1, _layout.preserve); this._WriteBool2(2, _layout.showMasterPhAnim); this._WriteBool2(3, _layout.showMasterSp); this._WriteBool2(4, _layout.userDrawn); this._WriteLimit2(5, _layout.type); this.WriteUChar(g_nodeAttributeEnd); this.WriteRecord1(0, _layout.cSld, this.WriteCSld); this.WriteRecord2(1, _layout.clrMap, this.WriteClrMapOvr); this.WriteRecord2(4, _layout.hf, this.WriteHF); this.EndRecord(); }; this.WriteSlide = function (_slide) { this.StartRecord(c_oMainTables.Slides); this.WriteUChar(g_nodeAttributeStart); this._WriteBool2(0, _slide.show); this._WriteBool2(1, _slide.showMasterPhAnim); this._WriteBool2(2, _slide.showMasterSp); this.WriteUChar(g_nodeAttributeEnd); this.WriteRecord1(0, _slide.cSld, this.WriteCSld); this.WriteRecord2(1, _slide.clrMap, this.WriteClrMapOvr); this.WriteRecord1(2, _slide.timing, this.WriteSlideTransition); var _countComments = 0; if (!this.IsUseFullUrl) { for (var i in _slide.writecomments) { ++_countComments; } } if (_countComments > 0) { this.StartRecord(4); this.StartRecord(0); this.WriteULong(_countComments); for (var i in _slide.writecomments) { var _comment = _slide.writecomments[i]; this.StartRecord(0); this.WriteUChar(g_nodeAttributeStart); this._WriteInt1(0, _comment.WriteAuthorId); this._WriteString1(1, _comment.WriteTime); this._WriteInt1(2, _comment.WriteCommentId); this._WriteInt1(3, (_comment.x * 25.4) >> 0); this._WriteInt1(4, (_comment.y * 25.4) >> 0); this._WriteString1(5, _comment.Data.m_sText); if (0 != _comment.WriteParentAuthorId) { this._WriteInt1(6, _comment.WriteParentAuthorId); this._WriteInt1(7, _comment.WriteParentCommentId); } this._WriteString1(8, _comment.AdditionalData); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); } this.EndRecord(); this.EndRecord(); } this.EndRecord(); }; this.WriteSlideTransition = function (_timing) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteBool1(0, _timing.SlideAdvanceOnMouseClick); if (_timing.SlideAdvanceAfter) { oThis._WriteInt1(1, _timing.SlideAdvanceDuration); if (_timing.TransitionType == c_oAscSlideTransitionTypes.None) { oThis._WriteInt1(2, 0); } } else { if (_timing.TransitionType == c_oAscSlideTransitionTypes.None) { oThis._WriteInt1(2, 10); } } if (_timing.TransitionType != c_oAscSlideTransitionTypes.None) { oThis._WriteInt1(2, _timing.TransitionDuration); if (_timing.TransitionDuration < 250) { oThis._WriteUChar1(3, 0); } else { if (_timing.TransitionDuration > 1000) { oThis._WriteUChar1(3, 2); } else { oThis._WriteUChar1(3, 1); } } oThis.WriteUChar(g_nodeAttributeEnd); oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); switch 
(_timing.TransitionType) { case c_oAscSlideTransitionTypes.Fade: oThis._WriteString2(0, "p:fade"); switch (_timing.TransitionOption) { case c_oAscSlideTransitionParams.Fade_Smoothly: oThis._WriteString2(1, "thruBlk"); oThis._WriteString2(2, "0"); break; case c_oAscSlideTransitionParams.Fade_Through_Black: oThis._WriteString2(1, "thruBlk"); oThis._WriteString2(2, "1"); break; default: break; } break; case c_oAscSlideTransitionTypes.Push: oThis._WriteString2(0, "p:push"); switch (_timing.TransitionOption) { case c_oAscSlideTransitionParams.Param_Left: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "r"); break; case c_oAscSlideTransitionParams.Param_Right: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "l"); break; case c_oAscSlideTransitionParams.Param_Top: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "d"); break; case c_oAscSlideTransitionParams.Param_Bottom: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "u"); break; default: break; } break; case c_oAscSlideTransitionTypes.Wipe: switch (_timing.TransitionOption) { case c_oAscSlideTransitionParams.Param_Left: oThis._WriteString2(0, "p:wipe"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "r"); break; case c_oAscSlideTransitionParams.Param_Right: oThis._WriteString2(0, "p:wipe"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "l"); break; case c_oAscSlideTransitionParams.Param_Top: oThis._WriteString2(0, "p:wipe"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "d"); break; case c_oAscSlideTransitionParams.Param_Bottom: oThis._WriteString2(0, "p:wipe"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "u"); break; case c_oAscSlideTransitionParams.Param_TopLeft: oThis._WriteString2(0, "p:strips"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "rd"); break; case c_oAscSlideTransitionParams.Param_TopRight: oThis._WriteString2(0, "p:strips"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "ld"); break; case c_oAscSlideTransitionParams.Param_BottomLeft: oThis._WriteString2(0, "p:strips"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "ru"); break; case c_oAscSlideTransitionParams.Param_BottomRight: oThis._WriteString2(0, "p:strips"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "lu"); break; default: break; } break; case c_oAscSlideTransitionTypes.Split: oThis._WriteString2(0, "p:split"); switch (_timing.TransitionOption) { case c_oAscSlideTransitionParams.Split_HorizontalIn: oThis._WriteString2(1, "orient"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "horz"); oThis._WriteString2(2, "in"); break; case c_oAscSlideTransitionParams.Split_HorizontalOut: oThis._WriteString2(1, "orient"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "horz"); oThis._WriteString2(2, "out"); break; case c_oAscSlideTransitionParams.Split_VerticalIn: oThis._WriteString2(1, "orient"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "vert"); oThis._WriteString2(2, "in"); break; case c_oAscSlideTransitionParams.Split_VerticalOut: oThis._WriteString2(1, "orient"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "vert"); oThis._WriteString2(2, "out"); break; default: break; } break; case c_oAscSlideTransitionTypes.UnCover: case c_oAscSlideTransitionTypes.Cover: if (_timing.TransitionType == c_oAscSlideTransitionTypes.Cover) { oThis._WriteString2(0, "p:cover"); } else { oThis._WriteString2(0, "p:pull"); } switch (_timing.TransitionOption) { case c_oAscSlideTransitionParams.Param_Left: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "r"); break; case 
c_oAscSlideTransitionParams.Param_Right: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "l"); break; case c_oAscSlideTransitionParams.Param_Top: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "d"); break; case c_oAscSlideTransitionParams.Param_Bottom: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "u"); break; case c_oAscSlideTransitionParams.Param_TopLeft: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "rd"); break; case c_oAscSlideTransitionParams.Param_TopRight: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "ld"); break; case c_oAscSlideTransitionParams.Param_BottomLeft: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "ru"); break; case c_oAscSlideTransitionParams.Param_BottomRight: oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "lu"); break; default: break; } break; case c_oAscSlideTransitionTypes.Clock: switch (_timing.TransitionOption) { case c_oAscSlideTransitionParams.Clock_Clockwise: oThis._WriteString2(0, "p:wheel"); oThis._WriteString2(1, "spokes"); oThis._WriteString2(2, "1"); break; case c_oAscSlideTransitionParams.Clock_Counterclockwise: oThis._WriteString2(0, "p14:wheelReverse"); oThis._WriteString2(1, "spokes"); oThis._WriteString2(2, "1"); break; case c_oAscSlideTransitionParams.Clock_Wedge: oThis._WriteString2(0, "p:wedge"); break; default: break; } break; case c_oAscSlideTransitionTypes.Zoom: switch (_timing.TransitionOption) { case c_oAscSlideTransitionParams.Zoom_In: oThis._WriteString2(0, "p14:warp"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "in"); break; case c_oAscSlideTransitionParams.Zoom_Out: oThis._WriteString2(0, "p14:warp"); oThis._WriteString2(1, "dir"); oThis._WriteString2(2, "out"); break; case c_oAscSlideTransitionParams.Zoom_AndRotate: oThis._WriteString2(0, "p:newsflash"); break; default: break; } break; default: break; } oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } else { oThis.WriteUChar(g_nodeAttributeEnd); } }; this.WriteSlideNote = function (_note) { this.StartRecord(c_oMainTables.NotesSlides); this.WriteUChar(g_nodeAttributeStart); this._WriteBool2(0, _note.showMasterPhAnim); this._WriteBool2(1, _note.showMasterSp); this.WriteUChar(g_nodeAttributeEnd); this.WriteRecord1(0, _note.cSld, this.WriteCSld); this.WriteRecord2(1, _note.clrMap, this.WriteClrMapOvr); this.EndRecord(); }; this.WriteNoteMaster = function (_master) { this.StartRecord(c_oMainTables.NotesMasters); this.WriteRecord1(0, _master.cSld, this.WriteCSld); this.WriteRecord1(1, _master.clrMap, this.WriteClrMap); this.WriteRecord2(2, _master.hf, this.WriteHF); this.WriteRecord2(3, _master.notesStyle, this.WriteTextListStyle); this.EndRecord(); }; this.WriteThemeElements = function (themeElements) { oThis.WriteRecord1(0, themeElements.clrScheme, oThis.WriteClrScheme); oThis.WriteRecord1(1, themeElements.fontScheme, oThis.WriteFontScheme); oThis.WriteRecord1(2, themeElements.fmtScheme, oThis.WriteFmtScheme); }; this.WriteFontScheme = function (fontScheme) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, fontScheme.name); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord1(0, fontScheme.majorFont, oThis.WriteFontCollection); oThis.WriteRecord1(1, fontScheme.minorFont, oThis.WriteFontCollection); }; this.WriteFontCollection = function (coll) { oThis.WriteRecord1(0, { Name: coll.latin, Index: -1 }, oThis.WriteTextFontTypeface); oThis.WriteRecord1(1, { Name: coll.ea, Index: -1 }, oThis.WriteTextFontTypeface); oThis.WriteRecord1(2, { Name: coll.cs, Index: -1 }, oThis.WriteTextFontTypeface); }; 
this.WriteFmtScheme = function (fmt) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, fmt.name); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecordArray(0, 0, fmt.fillStyleLst, oThis.WriteUniFill); oThis.WriteRecordArray(1, 0, fmt.lnStyleLst, oThis.WriteLn); oThis.WriteRecordArray(3, 0, fmt.bgFillStyleLst, oThis.WriteUniFill); }; this.WriteDefaultShapeDefinition = function (shapeDef) { oThis.WriteRecord1(0, shapeDef.spPr, oThis.WriteSpPr); oThis.WriteRecord1(1, shapeDef.bodyPr, oThis.WriteBodyPr); oThis.WriteRecord1(2, shapeDef.lstStyle, oThis.WriteTextListStyle); oThis.WriteRecord2(3, shapeDef.style, oThis.WriteShapeStyle); }; this.WriteExtraClrScheme = function (extraScheme) { oThis.WriteRecord1(0, extraScheme.clrScheme, oThis.WriteClrScheme); oThis.WriteRecord2(1, extraScheme.clrMap, oThis.WriteClrMap); }; this.WriteCSld = function (cSld) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString2(0, cSld.name); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord2(0, cSld.Bg, oThis.WriteBg); var spTree = cSld.spTree; var _len = spTree.length; oThis.StartRecord(1); oThis.StartRecord(4); var uniPr = new UniNvPr(); uniPr.cNvPr.id = 1; uniPr.cNvPr.name = ""; var spPr = new CSpPr(); spPr.xfrm.offX = 0; spPr.xfrm.offY = 0; spPr.xfrm.extX = 0; spPr.xfrm.extY = 0; spPr.xfrm.chOffX = 0; spPr.xfrm.chOffY = 0; spPr.xfrm.chExtX = 0; spPr.xfrm.chExtY = 0; spPr.WriteXfrm = spPr.xfrm; oThis.WriteRecord1(0, uniPr, oThis.WriteUniNvPr); oThis.WriteRecord1(1, spPr, oThis.WriteSpPr); if (0 != _len) { oThis.StartRecord(2); oThis.WriteULong(_len); for (var i = 0; i < _len; i++) { oThis.StartRecord(0); if (spTree[i] instanceof CShape) { oThis.WriteShape(spTree[i]); } else { if (spTree[i] instanceof CImageShape) { oThis.WriteImage(spTree[i]); } else { if (spTree[i] instanceof CGroupShape) { oThis.WriteGroupShape(spTree[i]); } else { if (spTree[i] instanceof CGraphicFrame && spTree[i].graphicObject instanceof CTable) { oThis.WriteTable(spTree[i]); } else { if (typeof CChartAsGroup != "undefined" && spTree[i] instanceof CChartAsGroup) { oThis.WriteChart(spTree[i]); } } } } } oThis.EndRecord(); } oThis.EndRecord(); } oThis.EndRecord(); oThis.EndRecord(); }; this.WriteClrMap = function (clrmap) { oThis.WriteUChar(g_nodeAttributeStart); var _len = clrmap.color_map.length; for (var i = 0; i < _len; ++i) { if (null != clrmap.color_map[i]) { oThis.WriteUChar(i); oThis.WriteUChar(clrmap.color_map[i]); } } oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteClrScheme = function (scheme) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, scheme.name); oThis.WriteUChar(g_nodeAttributeEnd); var _len = scheme.colors.length; for (var i = 0; i < _len; i++) { if (null != scheme.colors[i]) { oThis.WriteRecord1(i, scheme.colors[i], oThis.WriteUniColor); } } }; this.WriteClrMapOvr = function (clrmapovr) { oThis.WriteRecord2(0, clrmapovr, oThis.WriteClrMap); }; this.WriteHF = function (hf) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteBool2(0, hf.dt); oThis._WriteBool2(1, hf.ftr); oThis._WriteBool2(2, hf.hdr); oThis._WriteBool2(3, hf.sldNum); oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteTxStyles = function (txStyles) { oThis.WriteRecord2(0, txStyles.titleStyle, oThis.WriteTextListStyle); oThis.WriteRecord2(1, txStyles.bodyStyle, oThis.WriteTextListStyle); oThis.WriteRecord2(2, txStyles.otherStyle, oThis.WriteTextListStyle); }; this.WriteTextListStyle = function (styles) { var _levels = styles.levels; var _count = _levels.length; for (var i = 0; i < _count; ++i) { 
oThis.WriteRecord2(i, _levels[i], oThis.WriteTextParagraphPr); } }; this.WriteTextParagraphPr = function (tPr) { oThis.WriteUChar(g_nodeAttributeStart); var pPr = tPr.pPr; if (undefined !== pPr && null != pPr) { switch (pPr.Jc) { case align_Left: oThis._WriteUChar1(0, 4); break; case align_Center: oThis._WriteUChar1(0, 0); break; case align_Right: oThis._WriteUChar1(0, 5); break; case align_Justify: oThis._WriteUChar1(0, 2); break; default: break; } var ind = pPr.Ind; if (ind !== undefined && ind != null) { if (ind.FirstLine !== undefined) { oThis._WriteInt1(5, ind.FirstLine * 36000); } if (ind.Left !== undefined) { oThis._WriteInt1(8, ind.Left * 36000); } if (ind.Right !== undefined) { oThis._WriteInt1(9, ind.Right * 36000); } } } if (tPr.lvl !== undefined && tPr.lvl != null) { oThis._WriteInt1(7, tPr.lvl); } oThis.WriteUChar(g_nodeAttributeEnd); if (undefined !== pPr && null != pPr) { var spacing = pPr.Spacing; if (spacing !== undefined && spacing != null) { switch (spacing.LineRule) { case linerule_Auto: oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, (spacing.Line * 100000) >> 0); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); break; case linerule_Exact: oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(1, (spacing.Line / 0.00352777778) >> 0); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); break; default: break; } if (spacing.After !== undefined) { oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(1, (spacing.After / 0.00352777778) >> 0); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } if (spacing.Before !== undefined) { oThis.StartRecord(2); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(1, (spacing.Before / 0.00352777778) >> 0); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } } } var bullet = tPr.bullet; if (undefined !== bullet && null != bullet) { if (bullet.bulletColor != null && bullet.bulletColor.type != BULLET_TYPE_COLOR_NONE) { oThis.StartRecord(3); if (bullet.bulletColor.type == BULLET_TYPE_COLOR_CLR) { oThis.StartRecord(BULLET_TYPE_COLOR_CLR); oThis.WriteRecord2(0, bullet.bulletColor.UniColor, oThis.WriteUniColor); oThis.EndRecord(); } else { oThis.StartRecord(BULLET_TYPE_COLOR_CLRTX); oThis.EndRecord(); } oThis.EndRecord(); } if (bullet.bulletSize != null && bullet.bulletSize.type != BULLET_TYPE_SIZE_NONE) { oThis.StartRecord(4); if (bullet.bulletSize.type == BULLET_TYPE_SIZE_PTS) { oThis.StartRecord(BULLET_TYPE_SIZE_PTS); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, bullet.bulletSize.val); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } else { if (bullet.bulletSize.type == BULLET_TYPE_SIZE_PCT) { oThis.StartRecord(BULLET_TYPE_SIZE_PCT); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, bullet.bulletSize.val); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } else { oThis.StartRecord(BULLET_TYPE_SIZE_TX); oThis.EndRecord(); } } oThis.EndRecord(); } if (bullet.bulletTypeface != null && bullet.bulletTypeface.type != null && bullet.bulletTypeface.type != BULLET_TYPE_TYPEFACE_NONE) { oThis.StartRecord(5); if (bullet.bulletTypeface.type == BULLET_TYPE_TYPEFACE_BUFONT) { oThis.WriteRecord2(BULLET_TYPE_TYPEFACE_BUFONT, { Name: bullet.bulletTypeface.typeface, Index: -1 }, oThis.WriteTextFontTypeface); } else { oThis.StartRecord(BULLET_TYPE_TYPEFACE_TX); oThis.EndRecord(); } oThis.EndRecord(); } if (bullet.bulletType != null && bullet.bulletType.type != null) { oThis.StartRecord(6); switch 
(bullet.bulletType.type) { case BULLET_TYPE_BULLET_CHAR: oThis.StartRecord(BULLET_TYPE_BULLET_CHAR); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, bullet.bulletType.Char); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); break; case BULLET_TYPE_BULLET_BLIP: oThis.StartRecord(BULLET_TYPE_BULLET_CHAR); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, "*"); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); break; case BULLET_TYPE_BULLET_AUTONUM: oThis.StartRecord(BULLET_TYPE_BULLET_AUTONUM); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteLimit1(0, bullet.bulletType.AutoNumType); oThis._WriteInt2(1, bullet.bulletType.startAt); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); break; case BULLET_TYPE_BULLET_NONE: oThis.StartRecord(BULLET_TYPE_BULLET_NONE); oThis.EndRecord(); break; } oThis.EndRecord(); } } if (pPr !== undefined && pPr != null && pPr.Tabs !== undefined && pPr.Tabs != null) { if (pPr.Tabs.Tabs != undefined && pPr.Tabs.Tabs != null) { oThis.WriteRecordArray(7, 0, pPr.Tabs.Tabs, oThis.WriteTab); } } if (tPr !== undefined && tPr != null) { oThis.WriteRecord2(8, tPr.rPr, oThis.WriteRunProperties); } }; this.WriteRunProperties = function (rPr, hlinkObj) { if (rPr == null || rPr === undefined) { return; } oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteBool2(1, rPr.Bold); oThis._WriteBool2(7, rPr.Italic); var _cap = null; if (rPr.Caps === true) { _cap = 0; } else { if (rPr.SmallCaps === true) { _cap = 1; } else { if (rPr.Caps === false && rPr.SmallCaps === false) { _cap = 2; } } } if (null != _cap) { oThis._WriteUChar1(4, _cap); } var _strike = null; if (rPr.DStrikeout === true) { _strike = 0; } else { if (rPr.Strikeout === true) { _strike = 2; } else { if (rPr.DStrikeout === false && rPr.Strikeout === false) { _strike = 1; } } } if (null != _strike) { oThis._WriteUChar1(16, _strike); } if (undefined !== rPr.Underline && null != rPr.Underline) { oThis._WriteUChar1(18, (rPr.Underline === true) ? 
13 : 12); } if (undefined !== rPr.FontSize && null != rPr.FontSize) { oThis._WriteInt1(17, rPr.FontSize * 100); } if (vertalign_SubScript == rPr.VertAlign) { oThis._WriteInt1(2, -25000); } else { if (vertalign_SuperScript == rPr.VertAlign) { oThis._WriteInt1(2, 30000); } } oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord1(1, rPr.unifill, oThis.WriteUniFill); oThis.WriteRecord2(3, rPr.FontFamily, oThis.WriteTextFontTypeface); if (hlinkObj != null && hlinkObj !== undefined) { oThis.WriteRecord1(7, hlinkObj, oThis.WriteHyperlink); } }; this.WriteHyperlink = function (hlink) { oThis.WriteUChar(g_nodeAttributeStart); var url = hlink.Value; var action = null; if (url == "ppaction://hlinkshowjump?jump=firstslide") { action = url; url = ""; } else { if (url == "ppaction://hlinkshowjump?jump=lastslide") { action = url; url = ""; } else { if (url == "ppaction://hlinkshowjump?jump=nextslide") { action = url; url = ""; } else { if (url == "ppaction://hlinkshowjump?jump=previousslide") { action = url; url = ""; } else { var mask = "ppaction://hlinksldjumpslide"; var indSlide = url.indexOf(mask); if (0 == indSlide) { var slideNum = parseInt(url.substring(mask.length)); url = "slide" + (slideNum + 1) + ".xml"; action = "ppaction://hlinksldjump"; } } } } } oThis._WriteString1(0, url); oThis._WriteString2(2, action); oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteTextFontTypeface = function (typeface) { oThis.WriteUChar(g_nodeAttributeStart); if (!typeface || typeface.Name == null) { oThis.font_map["Arial"] = true; oThis._WriteString1(3, "Arial"); oThis.WriteUChar(g_nodeAttributeEnd); return; } if ((0 != typeface.Name.indexOf("+mj")) && (0 != typeface.Name.indexOf("+mn"))) { oThis.font_map[typeface.Name] = true; } oThis._WriteString1(3, typeface.Name); oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteTab = function (tab) { oThis.WriteUChar(g_nodeAttributeStart); var _algn = 2; if (tab.Value == tab_Center) { _algn = 0; } else { if (tab.Value == tab_Right) { _algn = 3; } } oThis._WriteLimit2(0, _algn); if (tab.Pos != undefined && tab.Pos != null) { oThis._WriteInt1(1, tab.Pos * 36000); } oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteBodyPr = function (bodyPr) { if (undefined === bodyPr || null == bodyPr) { return; } oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt2(0, bodyPr.flatTx); oThis._WriteLimit2(1, bodyPr.anchor); oThis._WriteBool2(2, bodyPr.anchorCtr); oThis._WriteInt4(3, bodyPr.bIns, 36000); oThis._WriteBool2(4, bodyPr.compatLnSpc); oThis._WriteBool2(5, bodyPr.forceAA); oThis._WriteBool2(6, bodyPr.fromWordArt); oThis._WriteLimit2(7, bodyPr.horzOverflow); oThis._WriteInt4(8, bodyPr.lIns, 36000); oThis._WriteInt2(9, bodyPr.numCol); oThis._WriteInt4(10, bodyPr.rIns, 36000); oThis._WriteInt2(11, bodyPr.rot); oThis._WriteBool2(12, bodyPr.rtlCol); oThis._WriteInt2(13, bodyPr.spcCol); oThis._WriteBool2(14, bodyPr.spcFirstLastPara); oThis._WriteInt4(15, bodyPr.tIns, 36000); oThis._WriteBool2(16, bodyPr.upright); oThis._WriteLimit2(17, bodyPr.vert); oThis._WriteLimit2(18, bodyPr.vertOverflow); oThis._WriteLimit2(19, bodyPr.wrap); oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteUniColor = function (unicolor) { if (undefined === unicolor || null == unicolor || unicolor.color == null) { return; } var color = unicolor.color; switch (color.type) { case COLOR_TYPE_PRST: oThis.StartRecord(COLOR_TYPE_PRST); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, color.id); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteMods(unicolor.Mods); oThis.EndRecord(); break; case 
COLOR_TYPE_SCHEME: oThis.StartRecord(COLOR_TYPE_SCHEME); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteUChar1(0, color.id); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteMods(unicolor.Mods); oThis.EndRecord(); break; case COLOR_TYPE_SRGB: oThis.StartRecord(COLOR_TYPE_SRGB); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteUChar1(0, color.RGBA.R); oThis._WriteUChar1(1, color.RGBA.G); oThis._WriteUChar1(2, color.RGBA.B); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteMods(unicolor.Mods); oThis.EndRecord(); break; case COLOR_TYPE_SYS: oThis.StartRecord(COLOR_TYPE_SYS); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, color.id); oThis._WriteUChar1(1, color.RGBA.R); oThis._WriteUChar1(2, color.RGBA.G); oThis._WriteUChar1(3, color.RGBA.B); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteMods(unicolor.Mods); oThis.EndRecord(); break; } }; this.WriteMods = function (mods) { var _count = mods.Mods.length; if (0 == _count) { return; } oThis.StartRecord(0); oThis.WriteULong(_count); for (var i = 0; i < _count; ++i) { oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, mods.Mods[i].name); oThis._WriteInt2(1, mods.Mods[i].val); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } oThis.EndRecord(); }; this.CorrectUniColorAlpha = function (color, trans) { var mods = color.Mods.Mods; var _len = mods.length; if (trans != null) { var nIndex = -1; for (var i = 0; i < _len; i++) { if (mods[i].name == "alpha") { nIndex = i; break; } } if (-1 != nIndex) { --_len; mods.splice(nIndex, 1); } mods[_len] = new CColorMod(); mods[_len].name = "alpha"; mods[_len].val = (trans * 100000 / 255) >> 0; } }; this.WriteUniFill = function (unifill) { if (undefined === unifill || null == unifill) { return; } var trans = ((unifill.transparent != null) && (unifill.transparent != 255)) ? 
unifill.transparent : null; var fill = unifill.fill; if (undefined === fill || null == fill) { return; } switch (fill.type) { case FILL_TYPE_NOFILL: oThis.StartRecord(FILL_TYPE_NOFILL); oThis.EndRecord(); break; case FILL_TYPE_GRAD: oThis.StartRecord(FILL_TYPE_GRAD); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); oThis.StartRecord(0); var len = fill.colors.length; oThis.WriteULong(len); for (var i = 0; i < len; i++) { oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, fill.colors[i].pos); oThis.WriteUChar(g_nodeAttributeEnd); oThis.CorrectUniColorAlpha(fill.colors[i].color, trans); oThis.WriteRecord1(0, fill.colors[i].color, oThis.WriteUniColor); oThis.EndRecord(); } oThis.EndRecord(); if (fill.lin) { oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, fill.lin.angle); oThis._WriteBool1(1, fill.lin.scale); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } else { if (fill.path) { oThis.StartRecord(2); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteUChar1(0, fill.path.path); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } } oThis.EndRecord(); break; case FILL_TYPE_PATT: oThis.StartRecord(FILL_TYPE_PATT); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteLimit2(0, fill.ftype); oThis.WriteUChar(g_nodeAttributeEnd); oThis.CorrectUniColorAlpha(fill.fgClr, trans); oThis.CorrectUniColorAlpha(fill.bgClr, trans); oThis.WriteRecord1(0, fill.fgClr, oThis.WriteUniColor); oThis.WriteRecord1(1, fill.bgClr, oThis.WriteUniColor); oThis.EndRecord(); break; case FILL_TYPE_BLIP: oThis.StartRecord(FILL_TYPE_BLIP); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); var api_sheet = window["Asc"]["editor"]; var sFindString; if (api_sheet) { sFindString = api_sheet.wbModel.sUrlPath + "media/"; } else { sFindString = editor.DocumentUrl + "media/"; } var _src = fill.RasterImageId; if (0 == _src.indexOf(sFindString)) { _src = _src.substring(sFindString.length); } oThis.image_map[_src] = true; if (oThis.IsUseFullUrl) { if ((0 == _src.indexOf("theme")) && window.editor) { _src = oThis.PresentationThemesOrigin + _src; } else { if (0 != _src.indexOf("http:") && 0 != _src.indexOf("data:") && 0 != _src.indexOf("https:") && 0 != _src.indexOf("ftp:") && 0 != _src.indexOf("file:")) { _src = oThis.DocumentOrigin + "media/" + _src; } } } oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); if (null != trans) { oThis.StartRecord(2); oThis.WriteULong(1); oThis.StartRecord(3); oThis.StartRecord(21); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, (trans * 100000 / 255) >> 0); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); oThis.EndRecord(); oThis.EndRecord(); } oThis.StartRecord(3); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, _src); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); oThis.EndRecord(); if (fill.srcRect != null) { oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); if (fill.srcRect.l != null) { var _num = (fill.srcRect.l * 1000) >> 0; oThis._WriteString1(0, "" + _num); } if (fill.srcRect.t != null) { var _num = (fill.srcRect.t * 1000) >> 0; oThis._WriteString1(1, "" + _num); } if (fill.srcRect.l != null) { var _num = ((100 - fill.srcRect.r) * 1000) >> 0; oThis._WriteString1(2, "" + _num); } if (fill.srcRect.l != null) { var _num = ((100 - fill.srcRect.b) * 1000) >> 0; oThis._WriteString1(3, "" + _num); } oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } if (true 
=== fill.tile) { oThis.StartRecord(2); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } else { oThis.StartRecord(3); oThis.EndRecord(); } oThis.EndRecord(); break; case FILL_TYPE_SOLID: oThis.StartRecord(FILL_TYPE_SOLID); oThis.CorrectUniColorAlpha(fill.color, trans); oThis.WriteRecord1(0, fill.color, oThis.WriteUniColor); oThis.EndRecord(); break; default: break; } }; this.WriteLn = function (ln) { if (undefined === ln || null == ln) {<|fim▁hole|> oThis._WriteLimit2(1, ln.cap); oThis._WriteLimit2(2, ln.cmpd); oThis._WriteInt2(3, ln.w); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord2(0, ln.Fill, oThis.WriteUniFill); oThis.WriteRecord1(2, ln.Join, oThis.WriteLineJoin); oThis.WriteRecord2(3, ln.headEnd, oThis.WriteLineEnd); oThis.WriteRecord2(4, ln.tailEnd, oThis.WriteLineEnd); }; this.WriteLineJoin = function (join) { if (join == null || join === undefined) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, 0); oThis.WriteUChar(g_nodeAttributeEnd); return; } oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, (join.type != null && join.type !== undefined) ? join.type : 0); oThis._WriteInt2(1, join.limit); oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteLineEnd = function (end) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteLimit2(0, end.type); oThis._WriteLimit2(1, end.w); oThis._WriteLimit2(2, end.len); oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteTxBody = function (txBody) { if (txBody.bodyPr) { oThis.WriteRecord2(0, txBody.bodyPr, oThis.WriteBodyPr); } if (txBody.lstStyle) { oThis.WriteRecord2(1, txBody.lstStyle, oThis.WriteTextListStyle); } var _content = txBody.content.Content; oThis.WriteRecordArray(2, 0, _content, oThis.WriteParagraph); }; this.WriteParagraph = function (paragraph, startPos, endPos) { var tPr = new CTextParagraphPr(); if (paragraph.bullet) { tPr.bullet = paragraph.bullet; } tPr.lvl = paragraph.PresentationPr.Level; tPr.pPr = paragraph.Pr; tPr.rPr = null; if (tPr.rPr == null) { tPr.rPr = new CTextPr(); } oThis.WriteRecord1(0, tPr, oThis.WriteTextParagraphPr); oThis.WriteRecord2(1, paragraph.TextPr.Value, oThis.WriteRunProperties); oThis.StartRecord(2); var _position = oThis.pos; oThis.WriteULong(0); var _count = 0; var _par_content = paragraph.Content; var start_pos = startPos != null ? startPos : 0; var end_pos = endPos != undefined ? endPos : _par_content.length; if (paragraph.f_id != undefined || paragraph.f_type != undefined || paragraph.f_text != undefined) { oThis.StartRecord(0); oThis.WriteParagraphField(paragraph.f_id, paragraph.f_type, paragraph.f_text); oThis.EndRecord(); _count++; } var _content_index; var _cur_run_text = ""; _content_index = start_pos; var _cur_run_text_pr = null; var hlinkObj = null; while (_content_index < end_pos) { switch (_par_content[_content_index].Type) { case para_Text: _cur_run_text += _par_content[_content_index].Value; break; case para_Space: _cur_run_text += " "; break; case para_Tab: _cur_run_text += "\t"; break; case para_TextPr: if (("" != _cur_run_text) || (null != _cur_run_text_pr)) { oThis.StartRecord(0); oThis.WriteTextRun((null == _cur_run_text_pr) ? null : _cur_run_text_pr.Value, _cur_run_text, hlinkObj); oThis.EndRecord(); _count++; _cur_run_text = ""; _cur_run_text_pr = null; } _cur_run_text_pr = _par_content[_content_index]; break; case para_NewLine: if (("" != _cur_run_text) || (null != _cur_run_text_pr)) { oThis.StartRecord(0); oThis.WriteTextRun((null == _cur_run_text_pr) ? 
null : _cur_run_text_pr.Value, _cur_run_text, hlinkObj); oThis.EndRecord(); _count++; _cur_run_text = ""; _cur_run_text_pr = null; } oThis.StartRecord(0); oThis.WriteLineBreak(_cur_run_text_pr, hlinkObj); oThis.EndRecord(); _count++; break; case para_HyperlinkStart: if ("" != _cur_run_text) { oThis.StartRecord(0); oThis.WriteTextRun((null == _cur_run_text_pr) ? null : _cur_run_text_pr.Value, _cur_run_text, hlinkObj); oThis.EndRecord(); _count++; _cur_run_text = ""; } hlinkObj = _par_content[_content_index]; break; case para_HyperlinkEnd: if ("" != _cur_run_text) { oThis.StartRecord(0); oThis.WriteTextRun((null == _cur_run_text_pr) ? null : _cur_run_text_pr.Value, _cur_run_text, hlinkObj); oThis.EndRecord(); _count++; _cur_run_text = ""; } hlinkObj = null; break; } _content_index++; } if (_cur_run_text.length > 0) { oThis.StartRecord(0); oThis.WriteTextRun((null == _cur_run_text_pr) ? null : _cur_run_text_pr.Value, _cur_run_text, hlinkObj); oThis.EndRecord(); _count++; } var _new_pos = oThis.pos; oThis.pos = _position; oThis.WriteULong(_count); oThis.pos = _new_pos; oThis.EndRecord(); }; this.WriteParagraphField = function (id, type, text) { oThis.StartRecord(PARRUN_TYPE_FLD); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, id); oThis._WriteString2(1, type); oThis._WriteString2(2, text); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); }; this.WriteTextRun = function (runPr, text, hlinkObj) { oThis.StartRecord(PARRUN_TYPE_RUN); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString2(0, text); oThis.WriteUChar(g_nodeAttributeEnd); if (runPr !== undefined && runPr != null) { oThis.StartRecord(0); oThis.WriteRunProperties(runPr, hlinkObj); oThis.EndRecord(); } oThis.EndRecord(); }; this.WriteLineBreak = function (runPr, hlinkObj) { oThis.StartRecord(PARRUN_TYPE_BR); if (runPr !== undefined && runPr != null) { oThis.StartRecord(0); oThis.WriteRunProperties(runPr, hlinkObj); oThis.EndRecord(); } oThis.EndRecord(); }; this.WriteShapeStyle = function (style) { oThis.WriteRecord1(0, style.lnRef, oThis.WriteStyleRef); oThis.WriteRecord1(1, style.fillRef, oThis.WriteStyleRef); oThis.WriteRecord1(2, style.effectRef, oThis.WriteStyleRef); oThis.WriteRecord1(3, style.fontRef, oThis.WriteFontRef); }; this.WriteStyleRef = function (ref) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt2(0, ref.idx); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord1(0, ref.Color, oThis.WriteUniColor); }; this.WriteFontRef = function (ref) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteUChar2(0, ref.idx); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord1(0, ref.Color, oThis.WriteUniColor); }; this.WriteBg = function (bg) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteLimit2(0, bg.bwMode); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord2(0, bg.bgPr, oThis.WriteBgPr); oThis.WriteRecord2(1, bg.bgRef, oThis.WriteStyleRef); }; this.WriteBgPr = function (bgPr) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteBool2(0, bgPr.shadeToTitle); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord1(0, bgPr.Fill, oThis.WriteUniFill); }; this.WriteShape = function (shape) { oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteBool2(0, shape.attrUseBgFill); oThis.WriteUChar(g_nodeAttributeEnd); shape.spPr.WriteXfrm = shape.spPr.xfrm; var tmpFill = shape.spPr.Fill; var isUseTmpFill = false; if (tmpFill !== undefined && tmpFill != null) { var trans = ((tmpFill.transparent != null) && (tmpFill.transparent != 255)) ? 
tmpFill.transparent : null; if (trans != null) { if (tmpFill.fill === undefined || tmpFill.fill == null) { isUseTmpFill = true; shape.spPr.Fill = shape.brush; } } } oThis.WriteRecord2(0, shape.nvSpPr, oThis.WriteUniNvPr); oThis.WriteRecord1(1, shape.spPr, oThis.WriteSpPr); oThis.WriteRecord2(2, shape.style, oThis.WriteShapeStyle); oThis.WriteRecord2(3, shape.txBody, oThis.WriteTxBody); if (isUseTmpFill) { shape.spPr.Fill = tmpFill; } shape.spPr.WriteXfrm = null; oThis.EndRecord(); }; this.WriteImage = function (image) { oThis.StartRecord(2); oThis.WriteRecord1(0, image.nvPicPr, this.WriteUniNvPr); image.spPr.WriteXfrm = image.spPr.xfrm; if (image.spPr.geometry === undefined || image.spPr.geometry == null) { image.spPr.geometry = CreateGeometry("rect"); } oThis.WriteRecord1(1, image.blipFill, oThis.WriteUniFill); oThis.WriteRecord1(2, image.spPr, oThis.WriteSpPr); oThis.WriteRecord2(3, image.style, oThis.WriteShapeStyle); image.spPr.WriteXfrm = null; oThis.EndRecord(); }; this.WriteTable = function (grObj) { oThis.StartRecord(5); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord1(0, grObj.nvGraphicFramePr, oThis.WriteUniNvPr); if (grObj.spPr.xfrm && grObj.spPr.xfrm.isNotNull()) { oThis.WriteRecord2(1, grObj.spPr.xfrm, oThis.WriteXfrm); } oThis.WriteRecord2(2, grObj.graphicObject, oThis.WriteTable2); oThis.EndRecord(); }; this.WriteChart = function (grObj) { oThis.StartRecord(5); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); if (grObj.spPr.xfrm && grObj.spPr.xfrm.isNotNull()) { oThis.WriteRecord2(1, grObj.spPr.xfrm, oThis.WriteXfrm); } oThis.WriteRecord2(3, grObj, oThis.WriteChart2); oThis.EndRecord(); }; this.WriteChart2 = function (grObj) { var _memory = new CMemory(true); _memory.ImData = oThis.ImData; _memory.data = oThis.data; _memory.len = oThis.len; _memory.pos = oThis.pos; oThis.UseContinueWriter = true; var oBinaryChartWriter = new BinaryChartWriter(_memory); oBinaryChartWriter.WriteChartContent(grObj); oThis.ImData = _memory.ImData; oThis.data = _memory.data; oThis.len = _memory.len; oThis.pos = _memory.pos; oThis.UseContinueWriter = false; _memory.ImData = null; _memory.data = null; }; this.WriteTable2 = function (table) { var obj = new Object(); obj.props = table.Pr; obj.look = table.TableLook; obj.style = table.styleIndex; oThis.WriteRecord1(0, obj, oThis.WriteTableProps); var grid = table.TableGrid; var _len = grid.length; oThis.StartRecord(1); oThis.WriteULong(_len); for (var i = 0; i < _len; i++) { oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, (grid[i] * 36000) >> 0); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } oThis.EndRecord(); oThis.StartRecord(2); var rows_c = table.Content.length; oThis.WriteULong(rows_c); var _grid = oThis.GenerateTableWriteGrid(table); for (var i = 0; i < rows_c; i++) { oThis.StartRecord(0); oThis.WriteTableRow(table.Content[i], _grid.Rows[i]); oThis.EndRecord(); } oThis.EndRecord(); }; this.GenerateTableWriteGrid = function (table) { var TableGrid = new Object(); var _rows = table.Content; var _cols = table.TableGrid; var _cols_count = _cols.length; var _rows_count = _rows.length; TableGrid.Rows = new Array(_rows_count); for (var i = 0; i < _rows_count; i++) { TableGrid.Rows[i] = new Object(); TableGrid.Rows[i].Cells = new Array(); var _index = 0; var _cells_len = _rows[i].Content.length; for (var j = 0; j < _cells_len; j++) { var _cell = _rows[i].Content[j]; var _cell_info = new Object(); _cell_info.Cell = _cell; 
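// NOTE: in the vertical-merge pass below, "_index += _count + 1" steps one row past the
// end of each merge run, so a row that starts a new merge immediately after a previous
// run keeps its default row_span of 1 and never gets a rowSpan attribute written;
// "_index += _count" appears to be the intended step.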
_cell_info.row_span = 1; _cell_info.grid_span = (_cell.Pr.GridSpan === undefined || _cell.Pr.GridSpan == null) ? 1 : _cell.Pr.GridSpan; _cell_info.hMerge = false; _cell_info.vMerge = false; _cell_info.isEmpty = false; if (_cell.Pr.VMerge == vmerge_Continue) { _cell_info.vMerge = true; } TableGrid.Rows[i].Cells.push(_cell_info); if (_cell_info.grid_span > 1) { for (var t = _cell_info.grid_span - 1; t > 0; t--) { var _cell_info_empty = new Object(); _cell_info_empty.isEmpty = true; _cell_info_empty.vMerge = _cell_info.vMerge; TableGrid.Rows[i].Cells.push(_cell_info_empty); } } } } for (var i = 0; i < _cols_count; i++) { var _index = 0; while (_index < _rows_count) { var _count = 1; for (var j = _index + 1; j < _rows_count; j++) { if (TableGrid.Rows[j].Cells[i].vMerge !== true) { break; }++_count; } TableGrid.Rows[_index].Cells[i].row_span = _count; _index += _count + 1; } } return TableGrid; }; this.WriteEmptyTableCell = function (_info) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteBool1(3, true); if (true == _info.vMerge) { oThis._WriteBool1(4, true); } oThis.WriteUChar(g_nodeAttributeEnd); oThis.StartRecord(1); oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); oThis.StartRecord(2); oThis.WriteULong(1); oThis.StartRecord(0); oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); oThis.EndRecord(); oThis.EndRecord(); oThis.EndRecord(); }; this.WriteTableRow = function (row, row_info) { oThis.WriteUChar(g_nodeAttributeStart); if (row.Pr.Height !== undefined && row.Pr.Height != null) { oThis._WriteInt1(0, (row.Pr.Height.Value * 36000) >> 0); } oThis.WriteUChar(g_nodeAttributeEnd); oThis.StartRecord(0); var _len = row_info.Cells.length; oThis.WriteULong(_len); for (var i = 0; i < _len; i++) { oThis.StartRecord(1); var _info = row_info.Cells[i]; if (_info.isEmpty) { oThis.WriteEmptyTableCell(_info); } else { oThis.WriteUChar(g_nodeAttributeStart); if (_info.vMerge === false && _info.row_span > 1) { oThis._WriteInt1(1, _info.row_span); } if (_info.hMerge === false && _info.grid_span > 1) { oThis._WriteInt1(2, _info.grid_span); } if (_info.hMerge === true) { oThis._WriteBool1(3, true); } if (_info.vMerge === true) { oThis._WriteBool1(4, true); } oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteTableCell(_info.Cell); } oThis.EndRecord(); } oThis.EndRecord(); }; this.WriteTableCell = function (cell) { oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); var _marg = cell.Pr.TableCellMar; if (_marg !== undefined && null != _marg && null != _marg.Left && null != _marg.Top && null != _marg.Right && null != _marg.Bottom) { oThis._WriteInt1(0, (_marg.Left.W * 36000) >> 0); oThis._WriteInt1(1, (_marg.Top.W * 36000) >> 0); oThis._WriteInt1(2, (_marg.Right.W * 36000) >> 0); oThis._WriteInt1(3, (_marg.Bottom.W * 36000) >> 0); } oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord3(0, cell.Pr.TableCellBorders.Left, oThis.WriteTableCellBorder); oThis.WriteRecord3(1, cell.Pr.TableCellBorders.Top, oThis.WriteTableCellBorder); oThis.WriteRecord3(2, cell.Pr.TableCellBorders.Right, oThis.WriteTableCellBorder); oThis.WriteRecord3(3, cell.Pr.TableCellBorders.Bottom, oThis.WriteTableCellBorder); var shd = cell.Pr.Shd; if (shd !== undefined && shd != null) { oThis.WriteRecord2(6, shd.unifill, oThis.WriteUniFill); } oThis.EndRecord(); oThis.StartRecord(1); oThis.WriteRecordArray(2, 0, cell.Content.Content, oThis.WriteParagraph); oThis.EndRecord(); }; 
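// A minimal sketch (illustrative, not part of the original serializer) of the
// count-backpatching idiom WriteParagraph above relies on: reserve a 4-byte count with
// WriteULong(0), emit a variable number of child records, then seek the writer's pos
// back to patch the real count. "items" and "fWrite" are hypothetical arguments;
// fWrite stands for any of the Write* callbacks in this file.
this.WriteCountedRecords = function (items, fWrite) {
    var _position = oThis.pos;  // where the placeholder count lives
    oThis.WriteULong(0);        // placeholder
    var _count = 0;
    for (var i = 0; i < items.length; i++) {
        oThis.StartRecord(0);
        fWrite(items[i]);
        oThis.EndRecord();
        _count++;
    }
    var _new_pos = oThis.pos;
    oThis.pos = _position;      // seek back to the placeholder
    oThis.WriteULong(_count);   // backpatch the real count
    oThis.pos = _new_pos;       // restore the write head
};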
this.WriteTableProps = function (obj) { oThis.WriteUChar(g_nodeAttributeStart); if (obj.style != -1) { oThis._WriteString1(0, oThis.tableStylesGuides[obj.style]); } oThis._WriteBool1(2, obj.look.m_bFirst_Row); oThis._WriteBool1(3, obj.look.m_bFirst_Col); oThis._WriteBool1(4, obj.look.m_bLast_Row); oThis._WriteBool1(5, obj.look.m_bLast_Col); oThis._WriteBool1(6, obj.look.m_bBand_Hor); oThis._WriteBool1(7, obj.look.m_bBand_Ver); oThis.WriteUChar(g_nodeAttributeEnd); var shd = obj.props.Shd; if (shd !== undefined && shd != null) { if (shd.unifill !== undefined && shd.unifill != null) { if (shd.unifill.fill !== undefined && shd.unifill.fill != null) { oThis.WriteRecord1(0, shd.unifill, oThis.WriteUniFill); } } } }; this.WriteGroupShape = function (group) { oThis.StartRecord(4); group.spPr.WriteXfrm = group.spPr.xfrm; var _old_ph = group.nvGrpSpPr.nvPr.ph; group.nvGrpSpPr.nvPr.ph = null; oThis.WriteRecord1(0, group.nvGrpSpPr, oThis.WriteUniNvPr); group.nvGrpSpPr.nvPr.ph = _old_ph; oThis.WriteRecord1(1, group.spPr, oThis.WriteGrpSpPr); group.spPr.WriteXfrm = null; var spTree = group.spTree; var _len = spTree.length; if (0 != _len) { oThis.StartRecord(2); oThis.WriteULong(_len); for (var i = 0; i < _len; i++) { oThis.StartRecord(0); if (spTree[i] instanceof CShape) { oThis.WriteShape(spTree[i]); } else { if (spTree[i] instanceof CImageShape) { oThis.WriteImage(spTree[i]); } else { if (spTree[i] instanceof CGroupShape) { oThis.WriteGroupShape(spTree[i]); } else { if (spTree[i] instanceof CGraphicFrame && spTree[i].graphicObject instanceof CTable) { oThis.WriteTable(spTree[i]); } else { if (typeof CChartAsGroup != "undefined" && spTree[i] instanceof CChartAsGroup) { oThis.WriteChart(spTree[i]); } } } } } oThis.EndRecord(0); } oThis.EndRecord(); } oThis.EndRecord(); }; this.WriteGrpSpPr = function (grpSpPr) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteLimit2(0, grpSpPr.bwMode); oThis.WriteUChar(g_nodeAttributeEnd); if (grpSpPr.WriteXfrm && grpSpPr.WriteXfrm.isNotNull()) { oThis.WriteRecord2(0, grpSpPr.WriteXfrm, oThis.WriteXfrm); } oThis.WriteRecord1(1, grpSpPr.Fill, oThis.WriteUniFill); }; this.WriteSpPr = function (spPr) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteLimit2(0, spPr.bwMode); oThis.WriteUChar(g_nodeAttributeEnd); var _fill = spPr.Fill; var bIsExistFill = false; if (_fill !== undefined && _fill != null && _fill.fill !== undefined && _fill.fill != null) { bIsExistFill = true; } var bIsExistLn = false; if (spPr.ln !== undefined && spPr.ln != null) { _fill = spPr.ln.Fill; if (_fill !== undefined && _fill != null && _fill.fill !== undefined && _fill.fill != null) { bIsExistLn = true; } } if (spPr.WriteXfrm && spPr.WriteXfrm.isNotNull()) { oThis.WriteRecord2(0, spPr.WriteXfrm, oThis.WriteXfrm); } oThis.WriteRecord2(1, spPr.geometry, oThis.WriteGeometry); if (spPr.geometry === undefined || spPr.geometry == null) { if (bIsExistFill || bIsExistLn) { oThis.StartRecord(1); oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, "rect"); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); oThis.EndRecord(); } } oThis.WriteRecord1(2, spPr.Fill, oThis.WriteUniFill); oThis.WriteRecord2(3, spPr.ln, oThis.WriteLn); }; this.WriteXfrm = function (xfrm) { if (oThis.IsWordWriter === true) { return oThis.WriteXfrmRot(xfrm); } oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt4(0, xfrm.offX, c_dScalePPTXSizes); oThis._WriteInt4(1, xfrm.offY, c_dScalePPTXSizes); oThis._WriteInt4(2, xfrm.extX, c_dScalePPTXSizes); oThis._WriteInt4(3, xfrm.extY, 
c_dScalePPTXSizes); oThis._WriteInt4(4, xfrm.chOffX, c_dScalePPTXSizes); oThis._WriteInt4(5, xfrm.chOffY, c_dScalePPTXSizes); oThis._WriteInt4(6, xfrm.chExtX, c_dScalePPTXSizes); oThis._WriteInt4(7, xfrm.chExtY, c_dScalePPTXSizes); oThis._WriteBool2(8, xfrm.flipH); oThis._WriteBool2(9, xfrm.flipV); oThis._WriteInt4(10, xfrm.rot, 180 * 60000 / Math.PI); oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteXfrmRot = function (xfrm) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt4(0, xfrm.offX, c_dScalePPTXSizes); oThis._WriteInt4(1, xfrm.offY, c_dScalePPTXSizes); oThis._WriteInt4(2, xfrm.extX, c_dScalePPTXSizes); oThis._WriteInt4(3, xfrm.extY, c_dScalePPTXSizes); oThis._WriteInt4(4, xfrm.chOffX, c_dScalePPTXSizes); oThis._WriteInt4(5, xfrm.chOffY, c_dScalePPTXSizes); oThis._WriteInt4(6, xfrm.chExtX, c_dScalePPTXSizes); oThis._WriteInt4(7, xfrm.chExtY, c_dScalePPTXSizes); oThis._WriteBool2(8, xfrm.flipH); oThis._WriteBool2(9, xfrm.flipV); if (xfrm.rot != null) { var nCheckInvert = 0; if (true == xfrm.flipH) { nCheckInvert += 1; } if (true == xfrm.flipV) { nCheckInvert += 1; } var _rot = (xfrm.rot * 180 * 60000 / Math.PI) >> 0; var _n360 = 360 * 60000; if (_rot > _n360) { var _nDel = (_rot / _n360) >> 0; _rot = _rot - _nDel * _n360; } else { if (_rot < 0) { var _nDel = (-_rot / _n360) >> 0; _nDel += 1; _rot = _rot + _nDel * _n360; } } if (nCheckInvert == 1) { _rot = _n360 - _rot; } oThis._WriteInt1(10, _rot); } oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteUniNvPr = function (nv) { oThis.WriteRecord1(0, nv.cNvPr, oThis.Write_cNvPr); oThis.WriteRecord1(2, nv.nvPr, oThis.Write_nvPr); }; this.Write_cNvPr = function (cNvPr) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteInt1(0, cNvPr.id); oThis._WriteString1(1, cNvPr.name); oThis.WriteUChar(g_nodeAttributeEnd); }; this.Write_nvPr = function (nvPr) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteBool2(0, nvPr.isPhoto); oThis._WriteBool2(1, nvPr.userDrawn); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord2(0, nvPr.ph, oThis.Write_ph); }; this.Write_ph = function (ph) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteBool2(0, ph.hasCustomPrompt); oThis._WriteString2(1, ph.idx); oThis._WriteLimit2(2, ph.orient); oThis._WriteLimit2(3, ph.sz); oThis._WriteLimit2(4, ph.type); oThis.WriteUChar(g_nodeAttributeEnd); }; this.WriteGeometry = function (geom) { if (undefined === geom || null == geom) { return; } if (geom.preset != null && geom.preset != null) { oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, geom.preset); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteAdj(geom.gdLst, geom.avLst, 0); oThis.EndRecord(); } else { oThis.StartRecord(2); oThis.WriteAdj(geom.gdLst, geom.avLst, 0); oThis.WriteGuides(geom.gdLstInfo, 1); oThis.WriteAh(geom.ahXYLstInfo, geom.ahPolarLstInfo, 2); oThis.WriteCnx(geom.cnxLstInfo, 3); oThis.WritePathLst(geom.pathLst, 4); oThis.WriteRecord2(5, geom.rectS, oThis.WriteTextRect); oThis.EndRecord(); } }; this.WriteAdj = function (gdLst, avLst, rec_num) { var _len = 0; for (var i in avLst) { ++_len; } if (0 == _len) { return; } oThis.StartRecord(rec_num); oThis.WriteULong(_len); for (var i in avLst) { oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, i); oThis._WriteInt1(1, 15); oThis._WriteString1(2, "" + (gdLst[i] >> 0)); oThis.WriteUChar(g_nodeAttributeEnd); oThis.EndRecord(); } oThis.EndRecord(); }; this.WriteGuides = function (gdLst, rec_num) { var _len = gdLst.length; if (0 == rec_num) { return; } 
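// NOTE: this guard, like the matching ones in WriteAh and WriteCnx below, tests
// rec_num, which callers always pass as 1..3, so it never fires; WriteAdj and
// WritePathLst bail out on an empty list instead. As written, an empty guide list
// still emits a record with a zero count; "0 == _len" may have been intended.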
this.StartRecord(rec_num); this.WriteULong(_len); for (var i = 0; i < _len; i++) { this.StartRecord(1); var _gd = gdLst[i]; this.WriteUChar(g_nodeAttributeStart); this._WriteString1(0, _gd.name); this._WriteInt1(1, _gd.formula); if (_gd.x !== undefined) { this._WriteString1(2, _gd.x); } if (_gd.y !== undefined) { this._WriteString1(3, _gd.y); } if (_gd.z !== undefined) { this._WriteString1(4, _gd.z); } this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); } this.EndRecord(); }; this.WriteAh = function (ahLstXY, ahLstPolar, rec_num) { var _len = 0; for (var i in ahLstXY) { ++_len; } for (var i in ahLstPolar) { ++_len; } if (0 == rec_num) { return; } this.StartRecord(rec_num); this.WriteULong(_len); for (var i in ahLstXY) { this.StartRecord(1); var _ah = ahLstXY[i]; this.StartRecord(2); this.WriteUChar(g_nodeAttributeStart); this._WriteString2(0, _ah.posX); this._WriteString2(1, _ah.posY); this._WriteString2(2, _ah.gdRefX); this._WriteString2(3, _ah.gdRefY); this._WriteString2(4, _ah.maxX); this._WriteString2(5, _ah.maxY); this._WriteString2(6, _ah.minX); this._WriteString2(7, _ah.minY); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); this.EndRecord(); } for (var i in ahLstPolar) { this.StartRecord(1); var _ah = ahLstPolar[i]; this.StartRecord(2); this.WriteUChar(g_nodeAttributeStart); this._WriteString2(0, _ah.posX); this._WriteString2(1, _ah.posY); this._WriteString2(2, _ah.gdRefAng); this._WriteString2(3, _ah.gdRefR); this._WriteString2(4, _ah.maxAng); this._WriteString2(5, _ah.maxR); this._WriteString2(6, _ah.minAng); this._WriteString2(7, _ah.minR); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); this.EndRecord(); } this.EndRecord(); }; this.WriteCnx = function (cnxLst, rec_num) { var _len = 0; for (var i in cnxLst) { ++_len; } if (0 == rec_num) { return; } this.StartRecord(rec_num); this.WriteULong(_len); for (var i in cnxLst) { this.StartRecord(1); var _gd = cnxLst[i]; this.WriteUChar(g_nodeAttributeStart); this._WriteString1(0, _gd.x); this._WriteString1(1, _gd.y); this._WriteString1(2, _gd.ang); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); } this.EndRecord(); }; this.WriteTextRect = function (rect) { oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString2(0, rect.l); oThis._WriteString2(1, rect.t); oThis._WriteString2(2, rect.r); oThis._WriteString2(3, rect.b); oThis.WriteUChar(g_nodeAttributeEnd); }; this.WritePathLst = function (pathLst, rec_num) { var _len = pathLst.length; if (0 == _len) { return; } this.StartRecord(rec_num); this.WriteULong(_len); for (var i = 0; i < _len; i++) { this.StartRecord(1); var _path = pathLst[i]; this.WriteUChar(g_nodeAttributeStart); this._WriteBool2(0, _path.extrusionOk); if (_path.fill != null && _path.fill !== undefined) { this._WriteLimit1(1, (_path.fill == "none") ? 
4 : 5); } this._WriteInt2(2, _path.pathH); this._WriteBool2(3, _path.stroke); this._WriteInt2(4, _path.pathW); this.WriteUChar(g_nodeAttributeEnd); var _comms = _path.ArrPathCommandInfo; var _count = _comms.length; if (0 != _count) { this.StartRecord(0); this.WriteULong(_count); for (var j = 0; j < _count; j++) { this.StartRecord(0); var cmd = _comms[j]; switch (cmd.id) { case moveTo: this.StartRecord(1); this.WriteUChar(g_nodeAttributeStart); this._WriteString1(0, "" + cmd.X); this._WriteString1(1, "" + cmd.Y); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); break; case lineTo: this.StartRecord(2); this.WriteUChar(g_nodeAttributeStart); this._WriteString1(0, "" + cmd.X); this._WriteString1(1, "" + cmd.Y); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); break; case bezier3: this.StartRecord(6); this.WriteUChar(g_nodeAttributeStart); this._WriteString1(0, "" + cmd.X0); this._WriteString1(1, "" + cmd.Y0); this._WriteString1(2, "" + cmd.X1); this._WriteString1(3, "" + cmd.Y1); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); break; case bezier4: this.StartRecord(4); this.WriteUChar(g_nodeAttributeStart); this._WriteString1(0, "" + cmd.X0); this._WriteString1(1, "" + cmd.Y0); this._WriteString1(2, "" + cmd.X1); this._WriteString1(3, "" + cmd.Y1); this._WriteString1(4, "" + cmd.X2); this._WriteString1(5, "" + cmd.Y2); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); break; case arcTo: this.StartRecord(5); this.WriteUChar(g_nodeAttributeStart); this._WriteString1(0, "" + cmd.wR); this._WriteString1(1, "" + cmd.hR); this._WriteString1(2, "" + cmd.stAng); this._WriteString1(3, "" + cmd.swAng); this.WriteUChar(g_nodeAttributeEnd); this.EndRecord(); break; case close: this.StartRecord(3); this.EndRecord(); break; } this.EndRecord(); } this.EndRecord(); } this.EndRecord(); } this.EndRecord(); }; this.WriteTableStyle = function (num, tableStyle) { oThis.StartRecord(1); oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteString1(0, oThis.tableStylesGuides[num]); var __name = tableStyle.Name; __name = __name.replace(/&/g, "_"); __name = __name.replace(/>/g, "_"); __name = __name.replace(/</g, "_"); __name = __name.replace(/"/g, "_"); __name = __name.replace(/'/g, "_"); oThis._WriteString2(1, __name); oThis.WriteUChar(g_nodeAttributeEnd); if (undefined !== tableStyle.TablePr.Shd && null != tableStyle.TablePr.Shd) { oThis.StartRecord(0); if (tableStyle.TablePr.Shd.unifill != null && tableStyle.TablePr.Shd.unifill !== undefined) { oThis.StartRecord(0); oThis.WriteRecord2(0, tableStyle.TablePr.Shd.unifill, oThis.WriteUniFill); oThis.EndRecord(); } if (tableStyle.TablePr.Shd.fillRef != null && tableStyle.TablePr.Shd.fillRef !== undefined) { oThis.WriteRecord2(1, tableStyle.TablePr.Shd.fillRef, oThis.WriteStyleRef); } oThis.EndRecord(); } if (tableStyle.TableWholeTable) { oThis.StartRecord(1); oThis.WriteTableStylePartWH(tableStyle.TableWholeTable, tableStyle.TablePr); oThis.EndRecord(); } oThis.WriteRecord2(2, tableStyle.TableBand1Horz, oThis.WriteTableStylePart); oThis.WriteRecord2(3, tableStyle.TableBand2Horz, oThis.WriteTableStylePart); oThis.WriteRecord2(4, tableStyle.TableBand1Vert, oThis.WriteTableStylePart); oThis.WriteRecord2(5, tableStyle.TableBand2Vert, oThis.WriteTableStylePart); oThis.WriteRecord2(6, tableStyle.TableLastCol, oThis.WriteTableStylePart); oThis.WriteRecord2(7, tableStyle.TableFirstCol, oThis.WriteTableStylePart); oThis.WriteRecord2(8, tableStyle.TableFirstRow, oThis.WriteTableStylePart); oThis.WriteRecord2(9, tableStyle.TableLastRow, 
oThis.WriteTableStylePart); oThis.WriteRecord2(10, tableStyle.TableBRCell, oThis.WriteTableStylePart); oThis.WriteRecord2(11, tableStyle.TableBLCell, oThis.WriteTableStylePart); oThis.WriteRecord2(12, tableStyle.TableTRCell, oThis.WriteTableStylePart); oThis.WriteRecord2(13, tableStyle.TableTLCell, oThis.WriteTableStylePart); oThis.EndRecord(); }; this.WriteTableStylePart = function (_part) { var bIsFontRef = false; if (_part.TextPr.fontRef !== undefined && _part.TextPr.fontRef != null) { bIsFontRef = true; } var bIsFill = false; if (_part.TextPr.unifill !== undefined && _part.TextPr.unifill != null) { bIsFill = true; } if (bIsFontRef || bIsFill) { oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord2(0, _part.TextPr.fontRef, oThis.WriteFontRef); if (bIsFill && _part.TextPr.unifill.fill !== undefined && _part.TextPr.unifill.fill != null && _part.TextPr.unifill.fill.type == FILL_TYPE_SOLID) { oThis.WriteRecord2(1, _part.TextPr.unifill.fill.color, oThis.WriteUniColor); } oThis.EndRecord(); } oThis.StartRecord(1); oThis.StartRecord(0); oThis.WriteRecord3(0, _part.TableCellPr.TableCellBorders.Left, oThis.WriteTableCellBorderLineStyle); oThis.WriteRecord3(1, _part.TableCellPr.TableCellBorders.Right, oThis.WriteTableCellBorderLineStyle); oThis.WriteRecord3(2, _part.TableCellPr.TableCellBorders.Top, oThis.WriteTableCellBorderLineStyle); oThis.WriteRecord3(3, _part.TableCellPr.TableCellBorders.Bottom, oThis.WriteTableCellBorderLineStyle); oThis.WriteRecord3(4, _part.TableCellPr.TableCellBorders.InsideH, oThis.WriteTableCellBorderLineStyle); oThis.WriteRecord3(5, _part.TableCellPr.TableCellBorders.InsideV, oThis.WriteTableCellBorderLineStyle); oThis.EndRecord(); var _Shd = _part.TableCellPr.Shd; if (undefined !== _Shd && null != _Shd) { oThis.WriteRecord2(1, _Shd.fillRef, oThis.WriteStyleRef); if (_Shd.unifill !== undefined && _Shd.unifill != null) { oThis.StartRecord(2); oThis.WriteRecord2(0, _Shd.unifill, oThis.WriteUniFill); oThis.EndRecord(); } } oThis.EndRecord(); }; this.WriteTableStylePartWH = function (_part, tablePr) { var bIsFontRef = false; if (_part.TextPr.fontRef !== undefined && _part.TextPr.fontRef != null) { bIsFontRef = true; } var bIsFill = false; if (_part.TextPr.unifill !== undefined && _part.TextPr.unifill != null) { bIsFill = true; } if (bIsFontRef || bIsFill) { oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); oThis.WriteRecord2(0, _part.TextPr.fontRef, oThis.WriteFontRef); if (bIsFill && _part.TextPr.unifill.fill !== undefined && _part.TextPr.unifill.fill != null && _part.TextPr.unifill.fill.type == FILL_TYPE_SOLID) { oThis.WriteRecord2(1, _part.TextPr.unifill.fill.color, oThis.WriteUniColor); } oThis.EndRecord(); } oThis.StartRecord(1); oThis.StartRecord(0); var bIsRet = false; bIsRet = oThis.WriteRecord3(0, _part.TableCellPr.TableCellBorders.Left, oThis.WriteTableCellBorderLineStyle); if (!bIsRet) { oThis.WriteTableCellBorderLineStyle2(0, tablePr.TableBorders.Left); } bIsRet = oThis.WriteRecord3(1, _part.TableCellPr.TableCellBorders.Right, oThis.WriteTableCellBorderLineStyle); if (!bIsRet) { oThis.WriteTableCellBorderLineStyle2(1, tablePr.TableBorders.Right); } bIsRet = oThis.WriteRecord3(2, _part.TableCellPr.TableCellBorders.Top, oThis.WriteTableCellBorderLineStyle); if (!bIsRet) { oThis.WriteTableCellBorderLineStyle2(2, tablePr.TableBorders.Top); } bIsRet = oThis.WriteRecord3(3, _part.TableCellPr.TableCellBorders.Bottom, oThis.WriteTableCellBorderLineStyle); 
if (!bIsRet) { oThis.WriteTableCellBorderLineStyle2(3, tablePr.TableBorders.Bottom); } oThis.WriteTableCellBorderLineStyle2(4, _part.TablePr.TableBorders.InsideH); oThis.WriteTableCellBorderLineStyle2(5, _part.TablePr.TableBorders.InsideV); oThis.EndRecord(); var _Shd = _part.TableCellPr.Shd; if (undefined !== _Shd && null != _Shd) { oThis.WriteRecord2(1, _Shd.fillRef, oThis.WriteStyleRef); if (_Shd.unifill !== undefined && _Shd.unifill != null) { oThis.StartRecord(2); oThis.WriteRecord2(0, _Shd.unifill, oThis.WriteUniFill); oThis.EndRecord(); } } oThis.EndRecord(); }; this.WriteTableCellBorder = function (_border) { if (_border.Value == border_None) { oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); var _unifill = new CUniFill(); _unifill.fill = new CNoFill(); oThis.WriteRecord2(0, _unifill, oThis.WriteUniFill); oThis.EndRecord(); return; } var bIsFill = false; var bIsSize = false; if ((_border.unifill !== undefined && _border.unifill != null) || _border.Color instanceof CDocumentColor) { bIsFill = true; } if (_border.Size !== undefined && _border.Size != null) { bIsSize = true; } if (bIsFill || bIsSize) { oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); if (bIsSize) { oThis._WriteInt2(3, (_border.Size * 36000) >> 0); } oThis.WriteUChar(g_nodeAttributeEnd); if (!_border.unifill && _border.Color instanceof CDocumentColor) { var _unifill = new CUniFill(); _unifill.fill = new CSolidFill(); _unifill.fill.color.color = new CRGBColor(); _unifill.fill.color.color.RGBA.R = _border.Color.r; _unifill.fill.color.color.RGBA.G = _border.Color.g; _unifill.fill.color.color.RGBA.B = _border.Color.b; oThis.WriteRecord2(0, _unifill, oThis.WriteUniFill); } oThis.WriteRecord2(0, _border.unifill, oThis.WriteUniFill); oThis.EndRecord(); } }; this.WriteTableCellBorderLineStyle2 = function (rec_type, _border) { if (!_border) { oThis.StartRecord(rec_type); oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); var _unifill = new CUniFill(); _unifill.fill = new CNoFill(); oThis.WriteRecord2(0, _unifill, oThis.WriteUniFill); oThis.EndRecord(); oThis.EndRecord(); return; } else { oThis.WriteRecord3(rec_type, _border, oThis.WriteTableCellBorderLineStyle); } }; this.WriteTableCellBorderLineStyle = function (_border) { if (_border.Value == border_None) { oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); oThis.WriteUChar(g_nodeAttributeEnd); var _unifill = new CUniFill(); _unifill.fill = new CNoFill(); oThis.WriteRecord2(0, _unifill, oThis.WriteUniFill); oThis.EndRecord(); return; } var bIsFill = false; var bIsSize = false; var bIsLnRef = false; if ((_border.unifill !== undefined && _border.unifill != null) || _border.Color instanceof CDocumentColor) { bIsFill = true; } if (_border.Size !== undefined && _border.Size != null) { bIsSize = true; } if (bIsFill && bIsSize) { oThis.StartRecord(0); oThis.WriteUChar(g_nodeAttributeStart); if (bIsSize) { oThis._WriteInt2(3, (_border.Size * 36000) >> 0); } oThis.WriteUChar(g_nodeAttributeEnd); if (!_border.unifill && _border.Color instanceof CDocumentColor) { var _unifill = new CUniFill(); _unifill.fill = new CSolidFill(); _unifill.fill.color.color = new CRGBColor(); _unifill.fill.color.color.RGBA.R = _border.Color.r; _unifill.fill.color.color.RGBA.G = _border.Color.g; _unifill.fill.color.color.RGBA.B = _border.Color.b; oThis.WriteRecord2(0, _unifill, oThis.WriteUniFill); } oThis.WriteRecord2(0, _border.unifill, oThis.WriteUniFill); oThis.EndRecord(); } 
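// NOTE: WriteTableCellBorder above emits the border record when either a fill or a
// size is present (bIsFill || bIsSize); this variant requires both (bIsFill && bIsSize),
// so a border carrying only a color or only a width is dropped here, and the declared
// bIsLnRef flag is never used.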
oThis.WriteRecord2(1, _border.lnRef, oThis.WriteStyleRef); }; }<|fim▁end|>
return; } oThis.WriteUChar(g_nodeAttributeStart); oThis._WriteLimit2(0, ln.algn);
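// The completion above fills the <|fim▁hole|> inside WriteLn: it closes the null guard
// with "return; }", opens the attribute block, and writes attribute 0 (ln.algn) so the
// suffix can resume with attributes 1-3 (cap, cmpd, w). Reconstructed, the completed
// function begins:
//     this.WriteLn = function (ln) {
//         if (undefined === ln || null == ln) { return; }
//         oThis.WriteUChar(g_nodeAttributeStart);
//         oThis._WriteLimit2(0, ln.algn); // end of the completion text
//         oThis._WriteLimit2(1, ln.cap);  // suffix resumes here
//         ...
//     };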
<|file_name|>PetrifyDamageSource.java<|end_file_name|><|fim▁begin|>package telinc.telicraft.util; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.util.ChatMessageComponent; import net.minecraft.util.StatCollector; public class PetrifyDamageSource extends TelicraftDamageSource { protected EntityLivingBase entity; protected PetrifyDamageSource(EntityLivingBase par1EntityLivingBase) { super("petrify"); this.setDamageAllowedInCreativeMode(); this.setDamageBypassesArmor(); this.entity = par1EntityLivingBase; } @Override<|fim▁hole|> public Entity getEntity() { return this.entity; } @Override public ChatMessageComponent getDeathMessage(EntityLivingBase par1EntityLivingBase) { EntityLivingBase attacker = par1EntityLivingBase.func_94060_bK(); String deathSelf = this.getRawDeathMessage(); String deathPlayer = deathSelf + ".player"; return attacker != null && StatCollector.func_94522_b(deathPlayer) ? ChatMessageComponent.func_111082_b(deathPlayer, new Object[]{par1EntityLivingBase.getTranslatedEntityName(), attacker.getTranslatedEntityName()}) : ChatMessageComponent.func_111082_b(deathSelf, new Object[]{par1EntityLivingBase.getTranslatedEntityName()}); } }<|fim▁end|>
<|file_name|>app.js<|end_file_name|><|fim▁begin|>'use strict'; const express = require('express'); const path = require('path'); const logger = require('morgan'); const cookieParser = require('cookie-parser'); const bodyParser = require('body-parser'); const cons = require('consolidate'); const session = require('express-session'); const mysql = require('mysql'); const index = require('./routes/index'); const users = require('./routes/users'); const signup = require('./routes/signup'); const savequestion = require('./routes/savequestion'); const questions = require('./routes/questions'); const questionbyid = require('./routes/questionbyid'); const saveanswer = require('./routes/saveanswer'); const login = require('./routes/login'); const logout = require('./routes/logout'); const downvote = require('./routes/downvote'); const upvote = require('./routes/upvote'); const app = express(); const config = require('./config'); console.log(`Run at ${ process.env.NODE_ENV}`); const con = mysql.createConnection(config.MYSQL); con.connect((err) => { if (err) { throw err; } console.log('Connected to database'); }); const sess = { secret: 'awhdRkRSKSLSIEHGugdYdfd' }; app.use(session(sess)); // view engine setup app.engine('html', cons.swig); app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'html'); // uncomment after placing your favicon in /public //app.use(favicon(path.join(__dirname, 'public', 'favicon.ico'))); app.use(logger('dev')); app.use(bodyParser.json()); app.use(bodyParser.urlencoded({ extended: false })); app.use(cookieParser()); app.use(express.static(path.join(__dirname, 'public')));<|fim▁hole|>app.use('/users', users); app.use('/login', login); app.use('/savequestion', savequestion); app.use('/questions', questions); app.use('/questionbyid', questionbyid); app.use('/saveanswer', saveanswer); app.use('/signup', signup); app.use('/logout', logout); app.use('/downvote', downvote); app.use('/upvote', upvote); // catch 404 and forward to error handler app.use((req, res, next) => { const err = new Error('Not Found'); err.status = 404; next(err); }); // Author Narendra Kumawat app.use((err, req, res, next) => { res.locals.message = err.message; res.locals.error = req.app.get('env') === 'development' ? err : {}; res.status(err.status || 500); res.render('error'); }); module.exports = app;<|fim▁end|>
app.use('/', index);
<|file_name|>asset_service.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go. DO NOT EDIT. // source: google/cloud/asset/v1beta1/asset_service.proto package asset import ( fmt "fmt" proto "github.com/golang/protobuf/proto" timestamp "github.com/golang/protobuf/ptypes/timestamp" context "golang.org/x/net/context" _ "google.golang.org/genproto/googleapis/api/annotations" longrunning "google.golang.org/genproto/googleapis/longrunning" grpc "google.golang.org/grpc" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // Asset content type. type ContentType int32 const ( // Unspecified content type. ContentType_CONTENT_TYPE_UNSPECIFIED ContentType = 0 // Resource metadata. ContentType_RESOURCE ContentType = 1 // The actual IAM policy set on a resource. ContentType_IAM_POLICY ContentType = 2 ) var ContentType_name = map[int32]string{ 0: "CONTENT_TYPE_UNSPECIFIED", 1: "RESOURCE", 2: "IAM_POLICY", } var ContentType_value = map[string]int32{ "CONTENT_TYPE_UNSPECIFIED": 0, "RESOURCE": 1, "IAM_POLICY": 2, } func (x ContentType) String() string { return proto.EnumName(ContentType_name, int32(x)) } func (ContentType) EnumDescriptor() ([]byte, []int) { return fileDescriptor_565cfbddaa85b7d6, []int{0} } // Export asset request. type ExportAssetsRequest struct { // Required. The relative name of the root asset. This can only be an organization // number (such as "organizations/123"), a project ID (such as // "projects/my-project-id"), or a project number (such as "projects/12345"). Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` // Timestamp to take an asset snapshot. This can only be set to a timestamp in // the past or of the current time. If not specified, the current time will be // used. Due to delays in resource data collection and indexing, there is a // volatile window during which running the same query may get different // results. ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` // A list of asset types of which to take a snapshot for. For example: // "google.compute.disk". If specified, only matching assets will be returned. AssetTypes []string `protobuf:"bytes,3,rep,name=asset_types,json=assetTypes,proto3" json:"asset_types,omitempty"` // Asset content type. If not specified, no content but the asset name will be // returned. ContentType ContentType `protobuf:"varint,4,opt,name=content_type,json=contentType,proto3,enum=google.cloud.asset.v1beta1.ContentType" json:"content_type,omitempty"` // Required. Output configuration indicating where the results will be output // to. All results will be in newline delimited JSON format. 
OutputConfig *OutputConfig `protobuf:"bytes,5,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ExportAssetsRequest) Reset() { *m = ExportAssetsRequest{} } func (m *ExportAssetsRequest) String() string { return proto.CompactTextString(m) } func (*ExportAssetsRequest) ProtoMessage() {} func (*ExportAssetsRequest) Descriptor() ([]byte, []int) { return fileDescriptor_565cfbddaa85b7d6, []int{0} } func (m *ExportAssetsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ExportAssetsRequest.Unmarshal(m, b) } func (m *ExportAssetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ExportAssetsRequest.Marshal(b, m, deterministic) } func (m *ExportAssetsRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_ExportAssetsRequest.Merge(m, src) } func (m *ExportAssetsRequest) XXX_Size() int { return xxx_messageInfo_ExportAssetsRequest.Size(m) } func (m *ExportAssetsRequest) XXX_DiscardUnknown() { xxx_messageInfo_ExportAssetsRequest.DiscardUnknown(m) } var xxx_messageInfo_ExportAssetsRequest proto.InternalMessageInfo func (m *ExportAssetsRequest) GetParent() string { if m != nil { return m.Parent } return "" } func (m *ExportAssetsRequest) GetReadTime() *timestamp.Timestamp { if m != nil { return m.ReadTime } return nil } func (m *ExportAssetsRequest) GetAssetTypes() []string { if m != nil { return m.AssetTypes } return nil } func (m *ExportAssetsRequest) GetContentType() ContentType { if m != nil { return m.ContentType } return ContentType_CONTENT_TYPE_UNSPECIFIED } func (m *ExportAssetsRequest) GetOutputConfig() *OutputConfig { if m != nil { return m.OutputConfig } return nil } // The export asset response. This message is returned by the // [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] method in the returned // [google.longrunning.Operation.response][google.longrunning.Operation.response] field. type ExportAssetsResponse struct { // Time the snapshot was taken. ReadTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` // Output configuration indicating where the results were output to. // All results are in JSON format. 
OutputConfig *OutputConfig `protobuf:"bytes,2,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ExportAssetsResponse) Reset() { *m = ExportAssetsResponse{} } func (m *ExportAssetsResponse) String() string { return proto.CompactTextString(m) } func (*ExportAssetsResponse) ProtoMessage() {} func (*ExportAssetsResponse) Descriptor() ([]byte, []int) { return fileDescriptor_565cfbddaa85b7d6, []int{1} } func (m *ExportAssetsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ExportAssetsResponse.Unmarshal(m, b) } func (m *ExportAssetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ExportAssetsResponse.Marshal(b, m, deterministic) } func (m *ExportAssetsResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_ExportAssetsResponse.Merge(m, src) } func (m *ExportAssetsResponse) XXX_Size() int { return xxx_messageInfo_ExportAssetsResponse.Size(m) } func (m *ExportAssetsResponse) XXX_DiscardUnknown() { xxx_messageInfo_ExportAssetsResponse.DiscardUnknown(m) } var xxx_messageInfo_ExportAssetsResponse proto.InternalMessageInfo func (m *ExportAssetsResponse) GetReadTime() *timestamp.Timestamp { if m != nil { return m.ReadTime } return nil } func (m *ExportAssetsResponse) GetOutputConfig() *OutputConfig { if m != nil { return m.OutputConfig } return nil } // Batch get assets history request. type BatchGetAssetsHistoryRequest struct { // Required. The relative name of the root asset. It can only be an // organization number (such as "organizations/123"), a project ID (such as // "projects/my-project-id")", or a project number (such as "projects/12345"). Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` // A list of the full names of the assets. For example: // `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. // See [Resource Names](https://cloud.google.com/apis/design/resource_names#full_resource_name) // for more info. // // The request becomes a no-op if the asset name list is empty, and the max // size of the asset name list is 100 in one request. AssetNames []string `protobuf:"bytes,2,rep,name=asset_names,json=assetNames,proto3" json:"asset_names,omitempty"` // Required. The content type. ContentType ContentType `protobuf:"varint,3,opt,name=content_type,json=contentType,proto3,enum=google.cloud.asset.v1beta1.ContentType" json:"content_type,omitempty"` // Required. The time window for the asset history. The start time is // required. The returned results contain all temporal assets whose time // window overlap with read_time_window. 
ReadTimeWindow *TimeWindow `protobuf:"bytes,4,opt,name=read_time_window,json=readTimeWindow,proto3" json:"read_time_window,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *BatchGetAssetsHistoryRequest) Reset() { *m = BatchGetAssetsHistoryRequest{} } func (m *BatchGetAssetsHistoryRequest) String() string { return proto.CompactTextString(m) } func (*BatchGetAssetsHistoryRequest) ProtoMessage() {} func (*BatchGetAssetsHistoryRequest) Descriptor() ([]byte, []int) { return fileDescriptor_565cfbddaa85b7d6, []int{2} } func (m *BatchGetAssetsHistoryRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BatchGetAssetsHistoryRequest.Unmarshal(m, b) } func (m *BatchGetAssetsHistoryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_BatchGetAssetsHistoryRequest.Marshal(b, m, deterministic) } func (m *BatchGetAssetsHistoryRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_BatchGetAssetsHistoryRequest.Merge(m, src) } func (m *BatchGetAssetsHistoryRequest) XXX_Size() int { return xxx_messageInfo_BatchGetAssetsHistoryRequest.Size(m) } func (m *BatchGetAssetsHistoryRequest) XXX_DiscardUnknown() { xxx_messageInfo_BatchGetAssetsHistoryRequest.DiscardUnknown(m) } var xxx_messageInfo_BatchGetAssetsHistoryRequest proto.InternalMessageInfo func (m *BatchGetAssetsHistoryRequest) GetParent() string { if m != nil { return m.Parent } return "" } <|fim▁hole|> } return nil } func (m *BatchGetAssetsHistoryRequest) GetContentType() ContentType { if m != nil { return m.ContentType } return ContentType_CONTENT_TYPE_UNSPECIFIED } func (m *BatchGetAssetsHistoryRequest) GetReadTimeWindow() *TimeWindow { if m != nil { return m.ReadTimeWindow } return nil } // Batch get assets history response. type BatchGetAssetsHistoryResponse struct { // A list of assets with valid time windows. Assets []*TemporalAsset `protobuf:"bytes,1,rep,name=assets,proto3" json:"assets,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *BatchGetAssetsHistoryResponse) Reset() { *m = BatchGetAssetsHistoryResponse{} } func (m *BatchGetAssetsHistoryResponse) String() string { return proto.CompactTextString(m) } func (*BatchGetAssetsHistoryResponse) ProtoMessage() {} func (*BatchGetAssetsHistoryResponse) Descriptor() ([]byte, []int) { return fileDescriptor_565cfbddaa85b7d6, []int{3} } func (m *BatchGetAssetsHistoryResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_BatchGetAssetsHistoryResponse.Unmarshal(m, b) } func (m *BatchGetAssetsHistoryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_BatchGetAssetsHistoryResponse.Marshal(b, m, deterministic) } func (m *BatchGetAssetsHistoryResponse) XXX_Merge(src proto.Message) { xxx_messageInfo_BatchGetAssetsHistoryResponse.Merge(m, src) } func (m *BatchGetAssetsHistoryResponse) XXX_Size() int { return xxx_messageInfo_BatchGetAssetsHistoryResponse.Size(m) } func (m *BatchGetAssetsHistoryResponse) XXX_DiscardUnknown() { xxx_messageInfo_BatchGetAssetsHistoryResponse.DiscardUnknown(m) } var xxx_messageInfo_BatchGetAssetsHistoryResponse proto.InternalMessageInfo func (m *BatchGetAssetsHistoryResponse) GetAssets() []*TemporalAsset { if m != nil { return m.Assets } return nil } // Output configuration for export assets destination. type OutputConfig struct { // Asset export destination. 
// // Types that are valid to be assigned to Destination: // *OutputConfig_GcsDestination Destination isOutputConfig_Destination `protobuf_oneof:"destination"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *OutputConfig) Reset() { *m = OutputConfig{} } func (m *OutputConfig) String() string { return proto.CompactTextString(m) } func (*OutputConfig) ProtoMessage() {} func (*OutputConfig) Descriptor() ([]byte, []int) { return fileDescriptor_565cfbddaa85b7d6, []int{4} } func (m *OutputConfig) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_OutputConfig.Unmarshal(m, b) } func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) } func (m *OutputConfig) XXX_Merge(src proto.Message) { xxx_messageInfo_OutputConfig.Merge(m, src) } func (m *OutputConfig) XXX_Size() int { return xxx_messageInfo_OutputConfig.Size(m) } func (m *OutputConfig) XXX_DiscardUnknown() { xxx_messageInfo_OutputConfig.DiscardUnknown(m) } var xxx_messageInfo_OutputConfig proto.InternalMessageInfo type isOutputConfig_Destination interface { isOutputConfig_Destination() } type OutputConfig_GcsDestination struct { GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"` } func (*OutputConfig_GcsDestination) isOutputConfig_Destination() {} func (m *OutputConfig) GetDestination() isOutputConfig_Destination { if m != nil { return m.Destination } return nil } func (m *OutputConfig) GetGcsDestination() *GcsDestination { if x, ok := m.GetDestination().(*OutputConfig_GcsDestination); ok { return x.GcsDestination } return nil } // XXX_OneofFuncs is for the internal use of the proto package. func (*OutputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { return _OutputConfig_OneofMarshaler, _OutputConfig_OneofUnmarshaler, _OutputConfig_OneofSizer, []interface{}{ (*OutputConfig_GcsDestination)(nil), } } func _OutputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { m := msg.(*OutputConfig) // destination switch x := m.Destination.(type) { case *OutputConfig_GcsDestination: b.EncodeVarint(1<<3 | proto.WireBytes) if err := b.EncodeMessage(x.GcsDestination); err != nil { return err } case nil: default: return fmt.Errorf("OutputConfig.Destination has unexpected type %T", x) } return nil } func _OutputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { m := msg.(*OutputConfig) switch tag { case 1: // destination.gcs_destination if wire != proto.WireBytes { return true, proto.ErrInternalBadWireType } msg := new(GcsDestination) err := b.DecodeMessage(msg) m.Destination = &OutputConfig_GcsDestination{msg} return true, err default: return false, nil } } func _OutputConfig_OneofSizer(msg proto.Message) (n int) { m := msg.(*OutputConfig) // destination switch x := m.Destination.(type) { case *OutputConfig_GcsDestination: s := proto.Size(x.GcsDestination) n += 1 // tag and wire n += proto.SizeVarint(uint64(s)) n += s case nil: default: panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) } return n } // A Cloud Storage location. type GcsDestination struct { // The path of the Cloud Storage objects. It's the same path that is used by // gsutil. For example: "gs://bucket_name/object_path". 
See [Viewing and Editing Object Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata) // for more information. Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *GcsDestination) Reset() { *m = GcsDestination{} } func (m *GcsDestination) String() string { return proto.CompactTextString(m) } func (*GcsDestination) ProtoMessage() {} func (*GcsDestination) Descriptor() ([]byte, []int) { return fileDescriptor_565cfbddaa85b7d6, []int{5} } func (m *GcsDestination) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GcsDestination.Unmarshal(m, b) } func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) } func (m *GcsDestination) XXX_Merge(src proto.Message) { xxx_messageInfo_GcsDestination.Merge(m, src) } func (m *GcsDestination) XXX_Size() int { return xxx_messageInfo_GcsDestination.Size(m) } func (m *GcsDestination) XXX_DiscardUnknown() { xxx_messageInfo_GcsDestination.DiscardUnknown(m) } var xxx_messageInfo_GcsDestination proto.InternalMessageInfo func (m *GcsDestination) GetUri() string { if m != nil { return m.Uri } return "" } func init() { proto.RegisterEnum("google.cloud.asset.v1beta1.ContentType", ContentType_name, ContentType_value) proto.RegisterType((*ExportAssetsRequest)(nil), "google.cloud.asset.v1beta1.ExportAssetsRequest") proto.RegisterType((*ExportAssetsResponse)(nil), "google.cloud.asset.v1beta1.ExportAssetsResponse") proto.RegisterType((*BatchGetAssetsHistoryRequest)(nil), "google.cloud.asset.v1beta1.BatchGetAssetsHistoryRequest") proto.RegisterType((*BatchGetAssetsHistoryResponse)(nil), "google.cloud.asset.v1beta1.BatchGetAssetsHistoryResponse") proto.RegisterType((*OutputConfig)(nil), "google.cloud.asset.v1beta1.OutputConfig") proto.RegisterType((*GcsDestination)(nil), "google.cloud.asset.v1beta1.GcsDestination") } func init() { proto.RegisterFile("google/cloud/asset/v1beta1/asset_service.proto", fileDescriptor_565cfbddaa85b7d6) } var fileDescriptor_565cfbddaa85b7d6 = []byte{ // 765 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xcd, 0x4e, 0xdb, 0x4a, 0x14, 0xc6, 0xce, 0xbd, 0x08, 0x26, 0x21, 0x37, 0x77, 0xfa, 0x23, 0x2b, 0x82, 0x12, 0xb9, 0x52, 0x09, 0x59, 0xd8, 0x22, 0x2c, 0x28, 0x54, 0x55, 0x45, 0x42, 0x0a, 0xa9, 0x4a, 0x12, 0x99, 0x40, 0x05, 0x42, 0xb2, 0x1c, 0x67, 0x70, 0x5d, 0x25, 0x33, 0xae, 0x67, 0x0c, 0xa5, 0x55, 0x37, 0xed, 0x23, 0x74, 0xdf, 0x45, 0x97, 0x5d, 0xf5, 0x35, 0xda, 0x6d, 0x5f, 0xa0, 0x8b, 0xbe, 0x42, 0xf7, 0x95, 0x67, 0x6c, 0x70, 0x44, 0x30, 0x6a, 0xd9, 0xe5, 0xcc, 0x7c, 0xdf, 0x77, 0xe6, 0x7c, 0xe7, 0xf8, 0x04, 0x68, 0x0e, 0x21, 0xce, 0x00, 0xe9, 0xf6, 0x80, 0x04, 0x7d, 0xdd, 0xa2, 0x14, 0x31, 0xfd, 0x78, 0xa9, 0x87, 0x98, 0xb5, 0x24, 0x22, 0x93, 0x22, 0xff, 0xd8, 0xb5, 0x91, 0xe6, 0xf9, 0x84, 0x11, 0x58, 0x14, 0x78, 0x8d, 0xe3, 0x35, 0x8e, 0xd0, 0x22, 0x7c, 0x71, 0x36, 0xd2, 0xb2, 0x3c, 0x57, 0xb7, 0x30, 0x26, 0xcc, 0x62, 0x2e, 0xc1, 0x54, 0x30, 0x8b, 0x0b, 0x57, 0x65, 0x8a, 0x81, 0x77, 0x23, 0xe0, 0x80, 0x60, 0xc7, 0x0f, 0x30, 0x76, 0xb1, 0xa3, 0x13, 0x0f, 0xf9, 0x23, 0x6a, 0xf3, 0x11, 0x88, 0x47, 0xbd, 0xe0, 0x48, 0x67, 0xee, 0x10, 0x51, 0x66, 0x0d, 0x3d, 0x01, 0x50, 0x3f, 0xc9, 0xe0, 0x46, 0xe3, 0x95, 0x47, 0x7c, 0xb6, 0xce, 0xc5, 0x0d, 0xf4, 0x32, 0x40, 0x94, 0xc1, 0xdb, 0x60, 0xd2, 0xb3, 0x7c, 0x84, 0x99, 0x22, 0x95, 0xa4, 0xf2, 
0xb4, 0x11, 0x45, 0x70, 0x05, 0x4c, 0xfb, 0xc8, 0xea, 0x9b, 0xa1, 0x8e, 0x22, 0x97, 0xa4, 0x72, 0xb6, 0x5a, 0x8c, 0xcc, 0xd1, 0xe2, 0x24, 0x5a, 0x37, 0x4e, 0x62, 0x4c, 0x85, 0xe0, 0x30, 0x84, 0xf3, 0x20, 0x2b, 0x8c, 0x62, 0xa7, 0x1e, 0xa2, 0x4a, 0xa6, 0x94, 0x29, 0x4f, 0x1b, 0x80, 0x1f, 0x75, 0xc3, 0x13, 0xf8, 0x04, 0xe4, 0x6c, 0x82, 0x19, 0xc2, 0x02, 0xa2, 0xfc, 0x53, 0x92, 0xca, 0xf9, 0xea, 0x82, 0x76, 0xb9, 0x93, 0x5a, 0x5d, 0xe0, 0x43, 0xbe, 0x91, 0xb5, 0xcf, 0x03, 0xb8, 0x0d, 0x66, 0x48, 0xc0, 0xbc, 0x80, 0x99, 0x36, 0xc1, 0x47, 0xae, 0xa3, 0xfc, 0xcb, 0x5f, 0x5a, 0x4e, 0x13, 0x6b, 0x73, 0x42, 0x9d, 0xe3, 0x8d, 0x1c, 0x49, 0x44, 0xea, 0x47, 0x09, 0xdc, 0x1c, 0x35, 0x89, 0x7a, 0x04, 0x53, 0x34, 0xea, 0x86, 0xf4, 0x07, 0x6e, 0x5c, 0x78, 0xa0, 0x7c, 0xad, 0x07, 0xfe, 0x92, 0xc0, 0x6c, 0xcd, 0x62, 0xf6, 0xf3, 0x4d, 0x14, 0x3d, 0x71, 0xcb, 0xa5, 0x8c, 0xf8, 0xa7, 0x57, 0xb5, 0xf3, 0xac, 0x2b, 0xd8, 0x1a, 0x22, 0xaa, 0xc8, 0x89, 0xae, 0xb4, 0xc2, 0x93, 0x0b, 0x5d, 0xc9, 0x5c, 0xa3, 0x2b, 0x1d, 0x50, 0x38, 0x73, 0xcb, 0x3c, 0x71, 0x71, 0x9f, 0x9c, 0xf0, 0x2e, 0x67, 0xab, 0xf7, 0xd2, 0xf4, 0x42, 0xc3, 0x9e, 0x71, 0xb4, 0x91, 0x8f, 0x0d, 0x14, 0xb1, 0xda, 0x03, 0x73, 0x97, 0x94, 0x1d, 0x35, 0x68, 0x1d, 0x4c, 0x8a, 0x8f, 0x46, 0x91, 0x4a, 0x99, 0x72, 0xb6, 0xba, 0x98, 0x9a, 0x08, 0x0d, 0x3d, 0xe2, 0x5b, 0x03, 0x2e, 0x65, 0x44, 0x44, 0x95, 0x81, 0x5c, 0xd2, 0x79, 0xb8, 0x0b, 0xfe, 0x73, 0x6c, 0x6a, 0xf6, 0x11, 0x65, 0x2e, 0xe6, 0x1f, 0x5b, 0xd4, 0xf9, 0x4a, 0x9a, 0xf6, 0xa6, 0x4d, 0x37, 0xce, 0x19, 0x5b, 0x13, 0x46, 0xde, 0x19, 0x39, 0xa9, 0xcd, 0x80, 0x6c, 0x42, 0x52, 0x55, 0x41, 0x7e, 0x94, 0x02, 0x0b, 0x20, 0x13, 0xf8, 0x6e, 0xd4, 0xbf, 0xf0, 0x67, 0xa5, 0x09, 0xb2, 0x09, 0xaf, 0xe1, 0x2c, 0x50, 0xea, 0xed, 0x56, 0xb7, 0xd1, 0xea, 0x9a, 0xdd, 0xfd, 0x4e, 0xc3, 0xdc, 0x6d, 0xed, 0x74, 0x1a, 0xf5, 0xe6, 0xe3, 0x66, 0x63, 0xa3, 0x30, 0x01, 0x73, 0x60, 0xca, 0x68, 0xec, 0xb4, 0x77, 0x8d, 0x7a, 0xa3, 0x20, 0xc1, 0x3c, 0x00, 0xcd, 0xf5, 0x6d, 0xb3, 0xd3, 0x7e, 0xda, 0xac, 0xef, 0x17, 0xe4, 0xea, 0x8f, 0x0c, 0xc8, 0xf1, 0xb2, 0x77, 0xc4, 0x1a, 0x83, 0x5f, 0x25, 0x90, 0x4b, 0x8e, 0x3c, 0xd4, 0xd3, 0xaa, 0x1b, 0xb3, 0x41, 0x8a, 0x73, 0x31, 0x21, 0xb1, 0xa0, 0xb4, 0x76, 0xbc, 0xa0, 0x54, 0xf7, 0xdd, 0xf7, 0x9f, 0x1f, 0x64, 0x5b, 0x5d, 0x3c, 0xdb, 0x6e, 0x6f, 0xc4, 0x4c, 0x3e, 0xf4, 0x7c, 0xf2, 0x02, 0xd9, 0x8c, 0xea, 0x95, 0xb7, 0x6b, 0x28, 0x21, 0xbc, 0x26, 0x55, 0x0e, 0x96, 0x55, 0xed, 0x02, 0x9e, 0xf8, 0x8e, 0x85, 0xdd, 0xd7, 0x62, 0xef, 0x8d, 0x21, 0xc1, 0xf7, 0x32, 0xb8, 0x35, 0x76, 0x4c, 0xe0, 0xfd, 0xb4, 0xa2, 0xd2, 0x3e, 0xa8, 0xe2, 0xea, 0x5f, 0x30, 0xc5, 0x4c, 0xaa, 0x01, 0xaf, 0x9c, 0xc0, 0x6a, 0x6a, 0xe5, 0xbd, 0x71, 0x1a, 0x07, 0xab, 0x70, 0xe5, 0xea, 0xfa, 0xc7, 0x52, 0x6b, 0x5f, 0x24, 0x70, 0xc7, 0x26, 0xc3, 0x94, 0x77, 0xd7, 0xfe, 0x4f, 0x8e, 0x40, 0x27, 0xdc, 0x5f, 0x1d, 0xe9, 0xe0, 0x51, 0x44, 0x70, 0xc8, 0xc0, 0xc2, 0x8e, 0x46, 0x7c, 0x47, 0x77, 0x10, 0xe6, 0xdb, 0x4d, 0x17, 0x57, 0x96, 0xe7, 0xd2, 0x71, 0xff, 0x57, 0x0f, 0x78, 0xf4, 0x59, 0x2e, 0x6e, 0x0a, 0x85, 0x3a, 0x4f, 0xc9, 0x73, 0x68, 0x7b, 0x4b, 0xb5, 0x10, 0xf2, 0x2d, 0xbe, 0x3c, 0xe4, 0x97, 0x87, 0xfc, 0xf2, 0x70, 0x4f, 0xf0, 0x7b, 0x93, 0x3c, 0xcb, 0xf2, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x53, 0x7f, 0x18, 0x6f, 0x7e, 0x07, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. 
const _ = grpc.SupportPackageIsVersion4 // AssetServiceClient is the client API for AssetService service. // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type AssetServiceClient interface { // Exports assets with time and resource types to a given Cloud Storage // location. The output format is newline-delimited JSON. // This API implements the [google.longrunning.Operation][google.longrunning.Operation] API allowing you // to keep track of the export. ExportAssets(ctx context.Context, in *ExportAssetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) // Batch gets the update history of assets that overlap a time window. // For RESOURCE content, this API outputs history with asset in both // non-delete or deleted status. // For IAM_POLICY content, this API outputs history when the asset and its // attached IAM POLICY both exist. This can create gaps in the output history. BatchGetAssetsHistory(ctx context.Context, in *BatchGetAssetsHistoryRequest, opts ...grpc.CallOption) (*BatchGetAssetsHistoryResponse, error) } type assetServiceClient struct { cc *grpc.ClientConn } func NewAssetServiceClient(cc *grpc.ClientConn) AssetServiceClient { return &assetServiceClient{cc} } func (c *assetServiceClient) ExportAssets(ctx context.Context, in *ExportAssetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { out := new(longrunning.Operation) err := c.cc.Invoke(ctx, "/google.cloud.asset.v1beta1.AssetService/ExportAssets", in, out, opts...) if err != nil { return nil, err } return out, nil } func (c *assetServiceClient) BatchGetAssetsHistory(ctx context.Context, in *BatchGetAssetsHistoryRequest, opts ...grpc.CallOption) (*BatchGetAssetsHistoryResponse, error) { out := new(BatchGetAssetsHistoryResponse) err := c.cc.Invoke(ctx, "/google.cloud.asset.v1beta1.AssetService/BatchGetAssetsHistory", in, out, opts...) if err != nil { return nil, err } return out, nil } // AssetServiceServer is the server API for AssetService service. type AssetServiceServer interface { // Exports assets with time and resource types to a given Cloud Storage // location. The output format is newline-delimited JSON. // This API implements the [google.longrunning.Operation][google.longrunning.Operation] API allowing you // to keep track of the export. ExportAssets(context.Context, *ExportAssetsRequest) (*longrunning.Operation, error) // Batch gets the update history of assets that overlap a time window. // For RESOURCE content, this API outputs history with asset in both // non-delete or deleted status. // For IAM_POLICY content, this API outputs history when the asset and its // attached IAM POLICY both exist. This can create gaps in the output history. 
BatchGetAssetsHistory(context.Context, *BatchGetAssetsHistoryRequest) (*BatchGetAssetsHistoryResponse, error) } func RegisterAssetServiceServer(s *grpc.Server, srv AssetServiceServer) { s.RegisterService(&_AssetService_serviceDesc, srv) } func _AssetService_ExportAssets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(ExportAssetsRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(AssetServiceServer).ExportAssets(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.asset.v1beta1.AssetService/ExportAssets", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(AssetServiceServer).ExportAssets(ctx, req.(*ExportAssetsRequest)) } return interceptor(ctx, in, info, handler) } func _AssetService_BatchGetAssetsHistory_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(BatchGetAssetsHistoryRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(AssetServiceServer).BatchGetAssetsHistory(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.cloud.asset.v1beta1.AssetService/BatchGetAssetsHistory", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(AssetServiceServer).BatchGetAssetsHistory(ctx, req.(*BatchGetAssetsHistoryRequest)) } return interceptor(ctx, in, info, handler) } var _AssetService_serviceDesc = grpc.ServiceDesc{ ServiceName: "google.cloud.asset.v1beta1.AssetService", HandlerType: (*AssetServiceServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "ExportAssets", Handler: _AssetService_ExportAssets_Handler, }, { MethodName: "BatchGetAssetsHistory", Handler: _AssetService_BatchGetAssetsHistory_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "google/cloud/asset/v1beta1/asset_service.proto", }<|fim▁end|>
func (m *BatchGetAssetsHistoryRequest) GetAssetNames() []string { if m != nil { return m.AssetNames
<|file_name|>server.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package app does all of the work necessary to create a Kubernetes // APIServer by binding together the API, master and APIServer infrastructure. // It can be configured and called directly or via the hyperkube framework. package app import ( "crypto/tls" "fmt" "net" "net/http" "net/url" "os" "strconv" "strings" "time" "k8s.io/kubernetes/openshift-kube-apiserver/admission/admissionenablement" "k8s.io/kubernetes/openshift-kube-apiserver/enablement" "k8s.io/kubernetes/openshift-kube-apiserver/openshiftkubeapiserver" "github.com/go-openapi/spec" "github.com/spf13/cobra" corev1 "k8s.io/api/core/v1" extensionsapiserver "k8s.io/apiextensions-apiserver/pkg/apiserver" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" utilerrors "k8s.io/apimachinery/pkg/util/errors" utilnet "k8s.io/apimachinery/pkg/util/net" "k8s.io/apimachinery/pkg/util/sets" utilwait "k8s.io/apimachinery/pkg/util/wait" "k8s.io/apiserver/pkg/admission" "k8s.io/apiserver/pkg/authentication/authenticator" "k8s.io/apiserver/pkg/authorization/authorizer" openapinamer "k8s.io/apiserver/pkg/endpoints/openapi" "k8s.io/apiserver/pkg/endpoints/request" genericapiserver "k8s.io/apiserver/pkg/server" "k8s.io/apiserver/pkg/server/filters" serveroptions "k8s.io/apiserver/pkg/server/options" serverstorage "k8s.io/apiserver/pkg/server/storage" "k8s.io/apiserver/pkg/storage/etcd3/preflight" "k8s.io/apiserver/pkg/util/feature" utilfeature "k8s.io/apiserver/pkg/util/feature" "k8s.io/apiserver/pkg/util/term" "k8s.io/apiserver/pkg/util/webhook" clientgoinformers "k8s.io/client-go/informers" clientgoclientset "k8s.io/client-go/kubernetes" "k8s.io/client-go/util/keyutil" cloudprovider "k8s.io/cloud-provider" cliflag "k8s.io/component-base/cli/flag" "k8s.io/component-base/cli/globalflag" "k8s.io/component-base/metrics" _ "k8s.io/component-base/metrics/prometheus/workqueue" // for workqueue metric registration "k8s.io/component-base/version" "k8s.io/component-base/version/verflag" "k8s.io/klog" aggregatorapiserver "k8s.io/kube-aggregator/pkg/apiserver" aggregatorscheme "k8s.io/kube-aggregator/pkg/apiserver/scheme" "k8s.io/kubernetes/cmd/kube-apiserver/app/options" "k8s.io/kubernetes/pkg/api/legacyscheme" "k8s.io/kubernetes/pkg/apis/core" v1 "k8s.io/kubernetes/pkg/apis/core/v1" "k8s.io/kubernetes/pkg/capabilities" serviceaccountcontroller "k8s.io/kubernetes/pkg/controller/serviceaccount" "k8s.io/kubernetes/pkg/features" generatedopenapi "k8s.io/kubernetes/pkg/generated/openapi" "k8s.io/kubernetes/pkg/kubeapiserver" kubeapiserveradmission "k8s.io/kubernetes/pkg/kubeapiserver/admission" kubeauthenticator "k8s.io/kubernetes/pkg/kubeapiserver/authenticator" "k8s.io/kubernetes/pkg/kubeapiserver/authorizer/modes" kubeoptions "k8s.io/kubernetes/pkg/kubeapiserver/options" kubeserver "k8s.io/kubernetes/pkg/kubeapiserver/server" "k8s.io/kubernetes/pkg/master" "k8s.io/kubernetes/pkg/master/reconcilers" "k8s.io/kubernetes/pkg/master/tunneler" 
"k8s.io/kubernetes/pkg/registry/cachesize" eventstorage "k8s.io/kubernetes/pkg/registry/core/event/storage" rbacrest "k8s.io/kubernetes/pkg/registry/rbac/rest" "k8s.io/kubernetes/pkg/serviceaccount" utilflag "k8s.io/kubernetes/pkg/util/flag" "k8s.io/kubernetes/plugin/pkg/auth/authenticator/token/bootstrap" ) const ( etcdRetryLimit = 60 etcdRetryInterval = 1 * time.Second ) // NewAPIServerCommand creates a *cobra.Command object with default parameters func NewAPIServerCommand() *cobra.Command { s := options.NewServerRunOptions() cmd := &cobra.Command{ Use: "kube-apiserver", Long: `The Kubernetes API server validates and configures data for the api objects which include pods, services, replicationcontrollers, and others. The API Server services REST operations and provides the frontend to the cluster's shared state through which all other components interact.`, RunE: func(cmd *cobra.Command, args []string) error { verflag.PrintAndExitIfRequested() if len(s.OpenShiftConfig) > 0 { openshiftConfig, err := enablement.GetOpenshiftConfig(s.OpenShiftConfig) if err != nil { klog.Fatal(err) } enablement.ForceOpenShift(openshiftConfig) // this forces a patch to be called // TODO we're going to try to remove bits of the patching. configPatchFn := openshiftkubeapiserver.NewOpenShiftKubeAPIServerConfigPatch(openshiftConfig) OpenShiftKubeAPIServerConfigPatch = configPatchFn args, err := openshiftkubeapiserver.ConfigToFlags(openshiftConfig) if err != nil { return err } // hopefully this resets the flags? if err := cmd.ParseFlags(args); err != nil { return err } // print merged flags (merged from OpenshiftConfig) utilflag.PrintFlags(cmd.Flags()) enablement.ForceGlobalInitializationForOpenShift() admissionenablement.InstallOpenShiftAdmissionPlugins(s) } else { // print default flags utilflag.PrintFlags(cmd.Flags()) } // set default options completedOptions, err := Complete(s) if err != nil { return err }<|fim▁hole|> } return Run(completedOptions, genericapiserver.SetupSignalHandler()) }, } fs := cmd.Flags() namedFlagSets := s.Flags() verflag.AddFlags(namedFlagSets.FlagSet("global")) globalflag.AddGlobalFlags(namedFlagSets.FlagSet("global"), cmd.Name()) options.AddCustomGlobalFlags(namedFlagSets.FlagSet("generic")) for _, f := range namedFlagSets.FlagSets { fs.AddFlagSet(f) } usageFmt := "Usage:\n %s\n" cols, _, _ := term.TerminalSize(cmd.OutOrStdout()) cmd.SetUsageFunc(func(cmd *cobra.Command) error { fmt.Fprintf(cmd.OutOrStderr(), usageFmt, cmd.UseLine()) cliflag.PrintSections(cmd.OutOrStderr(), namedFlagSets, cols) return nil }) cmd.SetHelpFunc(func(cmd *cobra.Command, args []string) { fmt.Fprintf(cmd.OutOrStdout(), "%s\n\n"+usageFmt, cmd.Long, cmd.UseLine()) cliflag.PrintSections(cmd.OutOrStdout(), namedFlagSets, cols) }) return cmd } // Run runs the specified APIServer. This should never exit. func Run(completeOptions completedServerRunOptions, stopCh <-chan struct{}) error { // To help debugging, immediately log version klog.Infof("Version: %+v", version.Get()) server, err := CreateServerChain(completeOptions, stopCh) if err != nil { return err } prepared, err := server.PrepareRun() if err != nil { return err } return prepared.Run(stopCh) } // CreateServerChain creates the apiservers connected via delegation. 
func CreateServerChain(completedOptions completedServerRunOptions, stopCh <-chan struct{}) (*aggregatorapiserver.APIAggregator, error) { nodeTunneler, proxyTransport, err := CreateNodeDialer(completedOptions) if err != nil { return nil, err } kubeAPIServerConfig, insecureServingInfo, serviceResolver, pluginInitializer, err := CreateKubeAPIServerConfig(completedOptions, nodeTunneler, proxyTransport) if err != nil { return nil, err } // If additional API servers are added, they should be gated. apiExtensionsConfig, err := createAPIExtensionsConfig(*kubeAPIServerConfig.GenericConfig, kubeAPIServerConfig.ExtraConfig.VersionedInformers, pluginInitializer, completedOptions.ServerRunOptions, completedOptions.MasterCount, serviceResolver, webhook.NewDefaultAuthenticationInfoResolverWrapper(proxyTransport, kubeAPIServerConfig.GenericConfig.LoopbackClientConfig)) if err != nil { return nil, err } apiExtensionsServer, err := createAPIExtensionsServer(apiExtensionsConfig, genericapiserver.NewEmptyDelegate()) if err != nil { return nil, err } kubeAPIServer, err := CreateKubeAPIServer(kubeAPIServerConfig, apiExtensionsServer.GenericAPIServer) if err != nil { return nil, err } // aggregator comes last in the chain aggregatorConfig, err := createAggregatorConfig(*kubeAPIServerConfig.GenericConfig, completedOptions.ServerRunOptions, kubeAPIServerConfig.ExtraConfig.VersionedInformers, serviceResolver, proxyTransport, pluginInitializer) if err != nil { return nil, err } aggregatorServer, err := createAggregatorServer(aggregatorConfig, kubeAPIServer.GenericAPIServer, apiExtensionsServer.Informers) if err != nil { // we don't need special handling for innerStopCh because the aggregator server doesn't create any go routines return nil, err } if insecureServingInfo != nil { insecureHandlerChain := kubeserver.BuildInsecureHandlerChain(aggregatorServer.GenericAPIServer.UnprotectedHandler(), kubeAPIServerConfig.GenericConfig) if err := insecureServingInfo.Serve(insecureHandlerChain, kubeAPIServerConfig.GenericConfig.RequestTimeout, stopCh); err != nil { return nil, err } } return aggregatorServer, nil } // CreateKubeAPIServer creates and wires a workable kube-apiserver func CreateKubeAPIServer(kubeAPIServerConfig *master.Config, delegateAPIServer genericapiserver.DelegationTarget) (*master.Master, error) { kubeAPIServer, err := kubeAPIServerConfig.Complete().New(delegateAPIServer) if err != nil { return nil, err } return kubeAPIServer, nil } // CreateNodeDialer creates the dialer infrastructure to connect to the nodes. 
func CreateNodeDialer(s completedServerRunOptions) (tunneler.Tunneler, *http.Transport, error) { // Setup nodeTunneler if needed var nodeTunneler tunneler.Tunneler var proxyDialerFn utilnet.DialFunc if len(s.SSHUser) > 0 { // Get ssh key distribution func, if supported var installSSHKey tunneler.InstallSSHKey cloud, err := cloudprovider.InitCloudProvider(s.CloudProvider.CloudProvider, s.CloudProvider.CloudConfigFile) if err != nil { return nil, nil, fmt.Errorf("cloud provider could not be initialized: %v", err) } if cloud != nil { if instances, supported := cloud.Instances(); supported { installSSHKey = instances.AddSSHKeyToAllInstances } } if s.KubeletConfig.Port == 0 { return nil, nil, fmt.Errorf("must enable kubelet port if proxy ssh-tunneling is specified") } if s.KubeletConfig.ReadOnlyPort == 0 { return nil, nil, fmt.Errorf("must enable kubelet readonly port if proxy ssh-tunneling is specified") } // Set up the nodeTunneler // TODO(cjcullen): If we want this to handle per-kubelet ports or other // kubelet listen-addresses, we need to plumb through options. healthCheckPath := &url.URL{ Scheme: "http", Host: net.JoinHostPort("127.0.0.1", strconv.FormatUint(uint64(s.KubeletConfig.ReadOnlyPort), 10)), Path: "healthz", } nodeTunneler = tunneler.New(s.SSHUser, s.SSHKeyfile, healthCheckPath, installSSHKey) // Use the nodeTunneler's dialer when proxying to pods, services, and nodes proxyDialerFn = nodeTunneler.Dial } // Proxying to pods and services is IP-based... don't expect to be able to verify the hostname proxyTLSClientConfig := &tls.Config{InsecureSkipVerify: true} proxyTransport := utilnet.SetTransportDefaults(&http.Transport{ DialContext: proxyDialerFn, TLSClientConfig: proxyTLSClientConfig, }) return nodeTunneler, proxyTransport, nil } // CreateKubeAPIServerConfig creates all the resources for running the API server, but runs none of them func CreateKubeAPIServerConfig( s completedServerRunOptions, nodeTunneler tunneler.Tunneler, proxyTransport *http.Transport, ) ( *master.Config, *genericapiserver.DeprecatedInsecureServingInfo, aggregatorapiserver.ServiceResolver, []admission.PluginInitializer, error, ) { genericConfig, versionedInformers, insecureServingInfo, serviceResolver, pluginInitializers, admissionPostStartHook, storageFactory, err := buildGenericConfig(s.ServerRunOptions, proxyTransport) if err != nil { return nil, nil, nil, nil, err } if _, port, err := net.SplitHostPort(s.Etcd.StorageConfig.Transport.ServerList[0]); err == nil && port != "0" && len(port) != 0 { if err := utilwait.PollImmediate(etcdRetryInterval, etcdRetryLimit*etcdRetryInterval, preflight.EtcdConnection{ServerList: s.Etcd.StorageConfig.Transport.ServerList}.CheckEtcdServers); err != nil { return nil, nil, nil, nil, fmt.Errorf("error waiting for etcd connection: %v", err) } } capabilities.Initialize(capabilities.Capabilities{ AllowPrivileged: s.AllowPrivileged, // TODO(vmarmol): Implement support for HostNetworkSources. 
PrivilegedSources: capabilities.PrivilegedSources{ HostNetworkSources: []string{}, HostPIDSources: []string{}, HostIPCSources: []string{}, }, PerConnectionBandwidthLimitBytesPerSec: s.MaxConnectionBytesPerSec, }) if len(s.ShowHiddenMetricsForVersion) > 0 { metrics.SetShowHidden() } serviceIPRange, apiServerServiceIP, err := master.ServiceIPRange(s.PrimaryServiceClusterIPRange) if err != nil { return nil, nil, nil, nil, err } // defaults to empty range and ip var secondaryServiceIPRange net.IPNet // process secondary range only if provided by user if s.SecondaryServiceClusterIPRange.IP != nil { secondaryServiceIPRange, _, err = master.ServiceIPRange(s.SecondaryServiceClusterIPRange) if err != nil { return nil, nil, nil, nil, err } } var eventStorage *eventstorage.REST eventStorage, err = eventstorage.NewREST(genericConfig.RESTOptionsGetter, uint64(s.EventTTL.Seconds())) if err != nil { return nil, nil, nil, nil, err } genericConfig.EventSink = eventRegistrySink{eventStorage} config := &master.Config{ GenericConfig: genericConfig, ExtraConfig: master.ExtraConfig{ APIResourceConfigSource: storageFactory.APIResourceConfigSource, StorageFactory: storageFactory, EventTTL: s.EventTTL, KubeletClientConfig: s.KubeletConfig, EnableLogsSupport: s.EnableLogsHandler, ProxyTransport: proxyTransport, Tunneler: nodeTunneler, ServiceIPRange: serviceIPRange, APIServerServiceIP: apiServerServiceIP, SecondaryServiceIPRange: secondaryServiceIPRange, APIServerServicePort: 443, ServiceNodePortRange: s.ServiceNodePortRange, KubernetesServiceNodePort: s.KubernetesServiceNodePort, EndpointReconcilerType: reconcilers.Type(s.EndpointReconcilerType), MasterCount: s.MasterCount, ServiceAccountIssuer: s.ServiceAccountIssuer, ServiceAccountMaxExpiration: s.ServiceAccountTokenMaxExpiration, VersionedInformers: versionedInformers, }, } clientCAProvider, err := s.Authentication.ClientCert.GetClientCAContentProvider() if err != nil { return nil, nil, nil, nil, err } config.ExtraConfig.ClusterAuthenticationInfo.ClientCA = clientCAProvider requestHeaderConfig, err := s.Authentication.RequestHeader.ToAuthenticationRequestHeaderConfig() if err != nil { return nil, nil, nil, nil, err } if requestHeaderConfig != nil { config.ExtraConfig.ClusterAuthenticationInfo.RequestHeaderCA = requestHeaderConfig.CAContentProvider config.ExtraConfig.ClusterAuthenticationInfo.RequestHeaderAllowedNames = requestHeaderConfig.AllowedClientNames config.ExtraConfig.ClusterAuthenticationInfo.RequestHeaderExtraHeaderPrefixes = requestHeaderConfig.ExtraHeaderPrefixes config.ExtraConfig.ClusterAuthenticationInfo.RequestHeaderGroupHeaders = requestHeaderConfig.GroupHeaders config.ExtraConfig.ClusterAuthenticationInfo.RequestHeaderUsernameHeaders = requestHeaderConfig.UsernameHeaders } if err := config.GenericConfig.AddPostStartHook("start-kube-apiserver-admission-initializer", admissionPostStartHook); err != nil { return nil, nil, nil, nil, err } if nodeTunneler != nil { // Use the nodeTunneler's dialer to connect to the kubelet config.ExtraConfig.KubeletClientConfig.Dial = nodeTunneler.Dial } if config.GenericConfig.EgressSelector != nil { // Use the config.GenericConfig.EgressSelector lookup to find the dialer to connect to the kubelet config.ExtraConfig.KubeletClientConfig.Lookup = config.GenericConfig.EgressSelector.Lookup } return config, insecureServingInfo, serviceResolver, pluginInitializers, nil } // BuildGenericConfig takes the master server options and produces the genericapiserver.Config associated with it func buildGenericConfig( s 
*options.ServerRunOptions, proxyTransport *http.Transport, ) ( genericConfig *genericapiserver.Config, versionedInformers clientgoinformers.SharedInformerFactory, insecureServingInfo *genericapiserver.DeprecatedInsecureServingInfo, serviceResolver aggregatorapiserver.ServiceResolver, pluginInitializers []admission.PluginInitializer, admissionPostStartHook genericapiserver.PostStartHookFunc, storageFactory *serverstorage.DefaultStorageFactory, lastErr error, ) { genericConfig = genericapiserver.NewConfig(legacyscheme.Codecs) genericConfig.MergedResourceConfig = master.DefaultAPIResourceConfigSource() if lastErr = s.GenericServerRunOptions.ApplyTo(genericConfig); lastErr != nil { return } if lastErr = s.InsecureServing.ApplyTo(&insecureServingInfo, &genericConfig.LoopbackClientConfig); lastErr != nil { return } if lastErr = s.SecureServing.ApplyTo(&genericConfig.SecureServing, &genericConfig.LoopbackClientConfig); lastErr != nil { return } if lastErr = s.Authentication.ApplyTo(genericConfig); lastErr != nil { return } if lastErr = s.Features.ApplyTo(genericConfig); lastErr != nil { return } if lastErr = s.APIEnablement.ApplyTo(genericConfig, master.DefaultAPIResourceConfigSource(), legacyscheme.Scheme); lastErr != nil { return } if lastErr = s.EgressSelector.ApplyTo(genericConfig); lastErr != nil { return } genericConfig.OpenAPIConfig = genericapiserver.DefaultOpenAPIConfig(generatedopenapi.GetOpenAPIDefinitions, openapinamer.NewDefinitionNamer(legacyscheme.Scheme, extensionsapiserver.Scheme, aggregatorscheme.Scheme)) genericConfig.OpenAPIConfig.Info.Title = "Kubernetes" genericConfig.LongRunningFunc = filters.BasicLongRunningRequestCheck( sets.NewString("watch", "proxy"), sets.NewString("attach", "exec", "proxy", "log", "portforward"), ) kubeVersion := version.Get() genericConfig.Version = &kubeVersion storageFactoryConfig := kubeapiserver.NewStorageFactoryConfig() storageFactoryConfig.APIResourceConfig = genericConfig.MergedResourceConfig completedStorageFactoryConfig, err := storageFactoryConfig.Complete(s.Etcd) if err != nil { lastErr = err return } storageFactory, lastErr = completedStorageFactoryConfig.New() if lastErr != nil { return } if genericConfig.EgressSelector != nil { storageFactory.StorageConfig.Transport.EgressLookup = genericConfig.EgressSelector.Lookup } if lastErr = s.Etcd.ApplyWithStorageFactoryTo(storageFactory, genericConfig); lastErr != nil { return } // Use protobufs for self-communication. // Since not every generic apiserver has to support protobufs, we // cannot default to it in generic apiserver and need to explicitly // set it in kube-apiserver. 
genericConfig.LoopbackClientConfig.ContentConfig.ContentType = "application/vnd.kubernetes.protobuf" // Disable compression for self-communication, since we are going to be // on a fast local network genericConfig.LoopbackClientConfig.DisableCompression = true enablement.SetLoopbackClientConfig(genericConfig.LoopbackClientConfig) kubeClientConfig := genericConfig.LoopbackClientConfig clientgoExternalClient, err := clientgoclientset.NewForConfig(kubeClientConfig) if err != nil { lastErr = fmt.Errorf("failed to create real external clientset: %v", err) return } versionedInformers = clientgoinformers.NewSharedInformerFactory(clientgoExternalClient, 10*time.Minute) genericConfig.Authentication.Authenticator, genericConfig.OpenAPIConfig.SecurityDefinitions, err = BuildAuthenticator(s, clientgoExternalClient, versionedInformers) if err != nil { lastErr = fmt.Errorf("invalid authentication config: %v", err) return } genericConfig.Authorization.Authorizer, genericConfig.RuleResolver, err = BuildAuthorizer(s, versionedInformers) if err != nil { lastErr = fmt.Errorf("invalid authorization config: %v", err) return } if !sets.NewString(s.Authorization.Modes...).Has(modes.ModeRBAC) { genericConfig.DisabledPostStartHooks.Insert(rbacrest.PostStartHookName) } admissionConfig := &kubeapiserveradmission.Config{ ExternalInformers: versionedInformers, LoopbackClientConfig: genericConfig.LoopbackClientConfig, CloudConfigFile: s.CloudProvider.CloudConfigFile, } serviceResolver = buildServiceResolver(s.EnableAggregatorRouting, genericConfig.LoopbackClientConfig.Host, versionedInformers) authInfoResolverWrapper := webhook.NewDefaultAuthenticationInfoResolverWrapper(proxyTransport, genericConfig.LoopbackClientConfig) lastErr = s.Audit.ApplyTo( genericConfig, genericConfig.LoopbackClientConfig, versionedInformers, serveroptions.NewProcessInfo("kube-apiserver", "kube-system"), &serveroptions.WebhookOptions{ AuthInfoResolverWrapper: authInfoResolverWrapper, ServiceResolver: serviceResolver, }, ) if lastErr != nil { return } pluginInitializers, admissionPostStartHook, err = admissionConfig.New(proxyTransport, serviceResolver) if err != nil { lastErr = fmt.Errorf("failed to create admission plugin initializer: %v", err) return } if err := PatchKubeAPIServerConfig(genericConfig, versionedInformers, &pluginInitializers); err != nil { lastErr = fmt.Errorf("failed to patch: %v", err) return } if enablement.IsOpenShift() { admissionenablement.SetAdmissionDefaults(s, versionedInformers, clientgoExternalClient) } err = s.Admission.ApplyTo( genericConfig, versionedInformers, kubeClientConfig, feature.DefaultFeatureGate, pluginInitializers...) 
if err != nil { lastErr = fmt.Errorf("failed to initialize admission: %v", err) } return } // BuildAuthenticator constructs the authenticator func BuildAuthenticator(s *options.ServerRunOptions, extclient clientgoclientset.Interface, versionedInformer clientgoinformers.SharedInformerFactory) (authenticator.Request, *spec.SecurityDefinitions, error) { authenticatorConfig, err := s.Authentication.ToAuthenticationConfig() if err != nil { return nil, nil, err } if s.Authentication.ServiceAccounts.Lookup || utilfeature.DefaultFeatureGate.Enabled(features.TokenRequest) { authenticatorConfig.ServiceAccountTokenGetter = serviceaccountcontroller.NewGetterFromClient( extclient, versionedInformer.Core().V1().Secrets().Lister(), versionedInformer.Core().V1().ServiceAccounts().Lister(), versionedInformer.Core().V1().Pods().Lister(), ) } authenticatorConfig.BootstrapTokenAuthenticator = bootstrap.NewTokenAuthenticator( versionedInformer.Core().V1().Secrets().Lister().Secrets(metav1.NamespaceSystem), ) return authenticatorConfig.New() } // BuildAuthorizer constructs the authorizer func BuildAuthorizer(s *options.ServerRunOptions, versionedInformers clientgoinformers.SharedInformerFactory) (authorizer.Authorizer, authorizer.RuleResolver, error) { authorizationConfig := s.Authorization.ToAuthorizationConfig(versionedInformers) return authorizationConfig.New() } // completedServerRunOptions is a private wrapper that enforces a call of Complete() before Run can be invoked. type completedServerRunOptions struct { *options.ServerRunOptions } // Complete set default ServerRunOptions. // Should be called after kube-apiserver flags parsed. func Complete(s *options.ServerRunOptions) (completedServerRunOptions, error) { var options completedServerRunOptions // set defaults if err := s.GenericServerRunOptions.DefaultAdvertiseAddress(s.SecureServing.SecureServingOptions); err != nil { return options, err } if err := kubeoptions.DefaultAdvertiseAddress(s.GenericServerRunOptions, s.InsecureServing.DeprecatedInsecureServingOptions); err != nil { return options, err } // process s.ServiceClusterIPRange from list to Primary and Secondary // we process secondary only if provided by user serviceClusterIPRangeList := strings.Split(s.ServiceClusterIPRanges, ",") var apiServerServiceIP net.IP var serviceIPRange net.IPNet var err error // nothing provided by user, use default range (only applies to the Primary) if len(serviceClusterIPRangeList) == 0 { var primaryServiceClusterCIDR net.IPNet serviceIPRange, apiServerServiceIP, err = master.ServiceIPRange(primaryServiceClusterCIDR) if err != nil { return options, fmt.Errorf("error determining service IP ranges: %v", err) } s.PrimaryServiceClusterIPRange = serviceIPRange } if len(serviceClusterIPRangeList) > 0 { _, primaryServiceClusterCIDR, err := net.ParseCIDR(serviceClusterIPRangeList[0]) if err != nil { return options, fmt.Errorf("service-cluster-ip-range[0] is not a valid cidr") } serviceIPRange, apiServerServiceIP, err = master.ServiceIPRange(*(primaryServiceClusterCIDR)) if err != nil { return options, fmt.Errorf("error determining service IP ranges for primary service cidr: %v", err) } s.PrimaryServiceClusterIPRange = serviceIPRange } // user provided at least two entries if len(serviceClusterIPRangeList) > 1 { _, secondaryServiceClusterCIDR, err := net.ParseCIDR(serviceClusterIPRangeList[1]) if err != nil { return options, fmt.Errorf("service-cluster-ip-range[1] is not an ip net") } s.SecondaryServiceClusterIPRange = *(secondaryServiceClusterCIDR) } //note: validation 
asserts that the list is max of two dual stack entries if err := s.SecureServing.MaybeDefaultWithSelfSignedCerts(s.GenericServerRunOptions.AdvertiseAddress.String(), []string{"kubernetes.default.svc", "kubernetes.default", "kubernetes"}, []net.IP{apiServerServiceIP}); err != nil { return options, fmt.Errorf("error creating self-signed certificates: %v", err) } if len(s.GenericServerRunOptions.ExternalHost) == 0 { if len(s.GenericServerRunOptions.AdvertiseAddress) > 0 { s.GenericServerRunOptions.ExternalHost = s.GenericServerRunOptions.AdvertiseAddress.String() } else { if hostname, err := os.Hostname(); err == nil { s.GenericServerRunOptions.ExternalHost = hostname } else { return options, fmt.Errorf("error finding host name: %v", err) } } klog.Infof("external host was not specified, using %v", s.GenericServerRunOptions.ExternalHost) } s.Authentication.ApplyAuthorization(s.Authorization) // Use (ServiceAccountSigningKeyFile != "") as a proxy to the user enabling // TokenRequest functionality. This defaulting was convenient, but messed up // a lot of people when they rotated their serving cert with no idea it was // connected to their service account keys. We are taking this opportunity to // remove this problematic defaulting. if s.ServiceAccountSigningKeyFile == "" { // Default to the private server key for service account token signing if len(s.Authentication.ServiceAccounts.KeyFiles) == 0 && s.SecureServing.ServerCert.CertKey.KeyFile != "" { if kubeauthenticator.IsValidServiceAccountKeyFile(s.SecureServing.ServerCert.CertKey.KeyFile) { s.Authentication.ServiceAccounts.KeyFiles = []string{s.SecureServing.ServerCert.CertKey.KeyFile} } else { klog.Warning("No TLS key provided, service account token authentication disabled") } } } if s.ServiceAccountSigningKeyFile != "" && s.Authentication.ServiceAccounts.Issuer != "" { sk, err := keyutil.PrivateKeyFromFile(s.ServiceAccountSigningKeyFile) if err != nil { return options, fmt.Errorf("failed to parse service-account-issuer-key-file: %v", err) } if s.Authentication.ServiceAccounts.MaxExpiration != 0 { lowBound := time.Hour upBound := time.Duration(1<<32) * time.Second if s.Authentication.ServiceAccounts.MaxExpiration < lowBound || s.Authentication.ServiceAccounts.MaxExpiration > upBound { return options, fmt.Errorf("the serviceaccount max expiration must be between 1 hour to 2^32 seconds") } } s.ServiceAccountIssuer, err = serviceaccount.JWTTokenGenerator(s.Authentication.ServiceAccounts.Issuer, sk) if err != nil { return options, fmt.Errorf("failed to build token generator: %v", err) } s.ServiceAccountTokenMaxExpiration = s.Authentication.ServiceAccounts.MaxExpiration } if s.Etcd.EnableWatchCache { klog.V(2).Infof("Initializing cache sizes based on %dMB limit", s.GenericServerRunOptions.TargetRAMMB) sizes := cachesize.NewHeuristicWatchCacheSizes(s.GenericServerRunOptions.TargetRAMMB) if userSpecified, err := serveroptions.ParseWatchCacheSizes(s.Etcd.WatchCacheSizes); err == nil { for resource, size := range userSpecified { sizes[resource] = size } } s.Etcd.WatchCacheSizes, err = serveroptions.WriteWatchCacheSizes(sizes) if err != nil { return options, err } } if s.APIEnablement.RuntimeConfig != nil { for key, value := range s.APIEnablement.RuntimeConfig { if key == "v1" || strings.HasPrefix(key, "v1/") || key == "api/v1" || strings.HasPrefix(key, "api/v1/") { delete(s.APIEnablement.RuntimeConfig, key) s.APIEnablement.RuntimeConfig["/v1"] = value } if key == "api/legacy" { delete(s.APIEnablement.RuntimeConfig, key) } } } 
options.ServerRunOptions = s return options, nil } func buildServiceResolver(enabledAggregatorRouting bool, hostname string, informer clientgoinformers.SharedInformerFactory) webhook.ServiceResolver { var serviceResolver webhook.ServiceResolver if enabledAggregatorRouting { serviceResolver = aggregatorapiserver.NewEndpointServiceResolver( informer.Core().V1().Services().Lister(), informer.Core().V1().Endpoints().Lister(), ) } else { serviceResolver = aggregatorapiserver.NewClusterIPServiceResolver( informer.Core().V1().Services().Lister(), ) } // resolve kubernetes.default.svc locally if localHost, err := url.Parse(hostname); err == nil { serviceResolver = aggregatorapiserver.NewLoopbackServiceResolver(serviceResolver, localHost) } return serviceResolver } // eventRegistrySink wraps an event registry in order to be used as direct event sync, without going through the API. type eventRegistrySink struct { *eventstorage.REST } var _ genericapiserver.EventSink = eventRegistrySink{} func (s eventRegistrySink) Create(v1event *corev1.Event) (*corev1.Event, error) { ctx := request.WithNamespace(request.NewContext(), v1event.Namespace) var event core.Event if err := v1.Convert_v1_Event_To_core_Event(v1event, &event, nil); err != nil { return nil, err } obj, err := s.REST.Create(ctx, &event, nil, &metav1.CreateOptions{}) if err != nil { return nil, err } ret, ok := obj.(*core.Event) if !ok { return nil, fmt.Errorf("expected corev1.Event, got %T", obj) } var v1ret corev1.Event if err := v1.Convert_core_Event_To_v1_Event(ret, &v1ret, nil); err != nil { return nil, err } return &v1ret, nil }<|fim▁end|>
// validate options if errs := completedOptions.Validate(); len(errs) != 0 { return utilerrors.NewAggregate(errs)
<|file_name|>module.js<|end_file_name|><|fim▁begin|><|fim▁hole|>(function () { 'use strict'; var module = angular.module('memosWebApp', []); }());<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from .inventory import ( GetInventoryRequest, Inventory, ListInventoriesRequest, ListInventoriesResponse, InventoryView, ) from .os_policy import OSPolicy from .os_policy_assignment_reports import ( GetOSPolicyAssignmentReportRequest, ListOSPolicyAssignmentReportsRequest, ListOSPolicyAssignmentReportsResponse, OSPolicyAssignmentReport, ) from .os_policy_assignments import ( CreateOSPolicyAssignmentRequest, DeleteOSPolicyAssignmentRequest, GetOSPolicyAssignmentRequest, ListOSPolicyAssignmentRevisionsRequest, ListOSPolicyAssignmentRevisionsResponse, ListOSPolicyAssignmentsRequest, ListOSPolicyAssignmentsResponse, OSPolicyAssignment, OSPolicyAssignmentOperationMetadata, UpdateOSPolicyAssignmentRequest, ) from .osconfig_common import FixedOrPercent from .patch_deployments import ( CreatePatchDeploymentRequest, DeletePatchDeploymentRequest,<|fim▁hole|> OneTimeSchedule, PatchDeployment, PausePatchDeploymentRequest, RecurringSchedule, ResumePatchDeploymentRequest, UpdatePatchDeploymentRequest, WeekDayOfMonth, WeeklySchedule, ) from .patch_jobs import ( AptSettings, CancelPatchJobRequest, ExecStep, ExecStepConfig, ExecutePatchJobRequest, GcsObject, GetPatchJobRequest, GooSettings, Instance, ListPatchJobInstanceDetailsRequest, ListPatchJobInstanceDetailsResponse, ListPatchJobsRequest, ListPatchJobsResponse, PatchConfig, PatchInstanceFilter, PatchJob, PatchJobInstanceDetails, PatchRollout, WindowsUpdateSettings, YumSettings, ZypperSettings, ) from .vulnerability import ( CVSSv3, GetVulnerabilityReportRequest, ListVulnerabilityReportsRequest, ListVulnerabilityReportsResponse, VulnerabilityReport, ) __all__ = ( "GetInventoryRequest", "Inventory", "ListInventoriesRequest", "ListInventoriesResponse", "InventoryView", "OSPolicy", "GetOSPolicyAssignmentReportRequest", "ListOSPolicyAssignmentReportsRequest", "ListOSPolicyAssignmentReportsResponse", "OSPolicyAssignmentReport", "CreateOSPolicyAssignmentRequest", "DeleteOSPolicyAssignmentRequest", "GetOSPolicyAssignmentRequest", "ListOSPolicyAssignmentRevisionsRequest", "ListOSPolicyAssignmentRevisionsResponse", "ListOSPolicyAssignmentsRequest", "ListOSPolicyAssignmentsResponse", "OSPolicyAssignment", "OSPolicyAssignmentOperationMetadata", "UpdateOSPolicyAssignmentRequest", "FixedOrPercent", "CreatePatchDeploymentRequest", "DeletePatchDeploymentRequest", "GetPatchDeploymentRequest", "ListPatchDeploymentsRequest", "ListPatchDeploymentsResponse", "MonthlySchedule", "OneTimeSchedule", "PatchDeployment", "PausePatchDeploymentRequest", "RecurringSchedule", "ResumePatchDeploymentRequest", "UpdatePatchDeploymentRequest", "WeekDayOfMonth", "WeeklySchedule", "AptSettings", "CancelPatchJobRequest", "ExecStep", "ExecStepConfig", "ExecutePatchJobRequest", "GcsObject", "GetPatchJobRequest", "GooSettings", "Instance", "ListPatchJobInstanceDetailsRequest", "ListPatchJobInstanceDetailsResponse", "ListPatchJobsRequest", 
"ListPatchJobsResponse", "PatchConfig", "PatchInstanceFilter", "PatchJob", "PatchJobInstanceDetails", "PatchRollout", "WindowsUpdateSettings", "YumSettings", "ZypperSettings", "CVSSv3", "GetVulnerabilityReportRequest", "ListVulnerabilityReportsRequest", "ListVulnerabilityReportsResponse", "VulnerabilityReport", )<|fim▁end|>
GetPatchDeploymentRequest, ListPatchDeploymentsRequest, ListPatchDeploymentsResponse, MonthlySchedule,
<|file_name|>ignore.js<|end_file_name|><|fim▁begin|>import path from 'node:path'; import test from 'ava'; import slash from 'slash'; import { isIgnoredByIgnoreFiles, isIgnoredByIgnoreFilesSync, isGitIgnored, isGitIgnoredSync, } from '../ignore.js'; import { PROJECT_ROOT, getPathValues, } from './utilities.js'; const runIsIgnoredByIgnoreFiles = async (t, patterns, options, fn) => { const promisePredicate = await isIgnoredByIgnoreFiles(patterns, options); const syncPredicate = isIgnoredByIgnoreFilesSync(patterns, options); const promiseResult = fn(promisePredicate); const syncResult = fn(syncPredicate); t[Array.isArray(promiseResult) ? 'deepEqual' : 'is']( promiseResult, syncResult, 'isIgnoredByIgnoreFilesSync() result is different than isIgnoredByIgnoreFiles()', ); return promiseResult; }; const runIsGitIgnored = async (t, options, fn) => { const promisePredicate = await isGitIgnored(options); const syncPredicate = isGitIgnoredSync(options); const promiseResult = fn(promisePredicate); const syncResult = fn(syncPredicate); t[Array.isArray(promiseResult) ? 'deepEqual' : 'is']( promiseResult, syncResult, 'isGitIgnoredSync() result is different than isGitIgnored()', ); return promiseResult; }; test('ignore', async t => { for (const cwd of getPathValues(path.join(PROJECT_ROOT, 'fixtures/gitignore'))) { // eslint-disable-next-line no-await-in-loop const actual = await runIsGitIgnored( t, {cwd}, isIgnored => ['foo.js', 'bar.js'].filter(file => !isIgnored(file)), ); const expected = ['bar.js']; t.deepEqual(actual, expected); } }); test('ignore - mixed path styles', async t => { const directory = path.join(PROJECT_ROOT, 'fixtures/gitignore'); for (const cwd of getPathValues(directory)) { t.true( // eslint-disable-next-line no-await-in-loop await runIsGitIgnored( t, {cwd}, isIgnored => isIgnored(slash(path.resolve(directory, 'foo.js'))), ), ); } }); test('ignore - os paths', async t => { const directory = path.join(PROJECT_ROOT, 'fixtures/gitignore'); for (const cwd of getPathValues(directory)) { t.true( // eslint-disable-next-line no-await-in-loop await runIsGitIgnored( t, {cwd}, isIgnored => isIgnored(path.resolve(directory, 'foo.js')), ), ); } }); test('negative ignore', async t => { for (const cwd of getPathValues(path.join(PROJECT_ROOT, 'fixtures/negative'))) { // eslint-disable-next-line no-await-in-loop const actual = await runIsGitIgnored( t, {cwd}, isIgnored => ['foo.js', 'bar.js'].filter(file => !isIgnored(file)), ); const expected = ['foo.js']; t.deepEqual(actual, expected); } });<|fim▁hole|> for (const cwd of getPathValues(path.join(PROJECT_ROOT, 'fixtures/multiple-negation'))) { // eslint-disable-next-line no-await-in-loop const actual = await runIsGitIgnored( t, {cwd}, isIgnored => [ '!!!unicorn.js', '!!unicorn.js', '!unicorn.js', 'unicorn.js', ].filter(file => !isIgnored(file)), ); const expected = ['!!unicorn.js', '!unicorn.js']; t.deepEqual(actual, expected); } }); test('check file', async t => { const directory = path.join(PROJECT_ROOT, 'fixtures/gitignore'); for (const ignoredFile of getPathValues(path.join(directory, 'foo.js'))) { t.true( // eslint-disable-next-line no-await-in-loop await runIsGitIgnored( t, {cwd: directory}, isIgnored => isIgnored(ignoredFile), ), ); } for (const notIgnoredFile of getPathValues(path.join(directory, 'bar.js'))) { t.false( // eslint-disable-next-line no-await-in-loop await runIsGitIgnored( t, {cwd: directory}, isIgnored => isIgnored(notIgnoredFile), ), ); } }); test('custom ignore files', async t => { const cwd = path.join(PROJECT_ROOT, 
'fixtures/ignore-files'); const files = [ 'ignored-by-eslint.js', 'ignored-by-prettier.js', 'not-ignored.js', ]; t.deepEqual( await runIsIgnoredByIgnoreFiles( t, '.eslintignore', {cwd}, isEslintIgnored => files.filter(file => isEslintIgnored(file)), ), [ 'ignored-by-eslint.js', ], ); t.deepEqual( await runIsIgnoredByIgnoreFiles( t, '.prettierignore', {cwd}, isPrettierIgnored => files.filter(file => isPrettierIgnored(file)), ), [ 'ignored-by-prettier.js', ], ); t.deepEqual( await runIsIgnoredByIgnoreFiles( t, '.{prettier,eslint}ignore', {cwd}, isEslintOrPrettierIgnored => files.filter(file => isEslintOrPrettierIgnored(file)), ), [ 'ignored-by-eslint.js', 'ignored-by-prettier.js', ], ); });<|fim▁end|>
test('multiple negation', async t => {
<|file_name|>flagSource.go<|end_file_name|><|fim▁begin|>package flag import ( "flag" "github.com/mono83/cfg" "github.com/mono83/cfg/reflect" "os" "sync" ) type flagSource struct { set *flag.FlagSet args []string m sync.Mutex values map[string]interface{} } // NewFlagSource creates new configuration source from command line flags func NewFlagSource() cfg.Configurer { return NewCustomFlagSource(flag.CommandLine, os.Args[1:]) } // NewCustomFlagSource creates new configuration source from provided // FlagSet and args func NewCustomFlagSource(source *flag.FlagSet, args []string) cfg.Configurer { return &flagSource{set: source, args: args} } func (f *flagSource) Validate() error { return f.load() }<|fim▁hole|>func (f *flagSource) load() error { f.m.Lock() defer f.m.Unlock() if !f.set.Parsed() { err := f.set.Parse(f.args) if err != nil { return err } } f.values = map[string]interface{}{} f.set.Visit(func(fl *flag.Flag) { v, ok := fl.Value.(flag.Getter) if ok { f.values[fl.Name] = v.Get() } }) return nil } func (f *flagSource) Has(key string) bool { if f.values == nil { err := f.load() if err != nil { return false } } _, ok := f.values[key] return ok } func (f *flagSource) UnmarshalKey(key string, target interface{}) error { if f.values == nil { err := f.load() if err != nil { return err } } v, ok := f.values[key] if !ok { return cfg.ErrKeyMissing{Key: key} } return reflect.CopyHelper(key, v, target) } func (f *flagSource) KeyFunc(key string) func(interface{}) error { return cfg.ExtractUnmarshalFunc(f, key) }<|fim▁end|>
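A minimal sketch of the same parse-once, cache-then-serve pattern in Python, using only the standard library (the class and method names here are illustrative, not part of the mono83/cfg API):

import argparse

class FlagConfigSource:
    """Configuration source backed by command-line flags, parsed lazily on first use."""

    def __init__(self, parser: argparse.ArgumentParser, args=None):
        self._parser = parser
        self._args = args
        self._values = None  # filled on first access, like flagSource.load()

    def _load(self):
        if self._values is None:
            self._values = vars(self._parser.parse_args(self._args))

    def has(self, key: str) -> bool:
        self._load()
        return key in self._values

    def get(self, key: str):
        self._load()
        if key not in self._values:
            raise KeyError(key)  # plays the role of cfg.ErrKeyMissing
        return self._values[key]

One behavioral difference worth noting: the Go source records only flags that were explicitly set (via flag.FlagSet.Visit), while argparse also fills in defaults.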
<|file_name|>markdown.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use collections::HashSet; use std::{str, io}; use std::strbuf::StrBuf; use getopts; use testing; use html::escape::Escape; use html::markdown::{MarkdownWithToc, find_testable_code, reset_headers}; use test::Collector; fn load_string(input: &Path) -> io::IoResult<Option<~str>> { let mut f = try!(io::File::open(input)); let d = try!(f.read_to_end()); Ok(str::from_utf8(d.as_slice()).map(|s| s.to_owned())) } macro_rules! load_or_return { ($input: expr, $cant_read: expr, $not_utf8: expr) => { { let input = Path::new($input); match load_string(&input) { Err(e) => { let _ = writeln!(&mut io::stderr(), "error reading `{}`: {}", input.display(), e); return $cant_read; } Ok(None) => { let _ = writeln!(&mut io::stderr(), "error reading `{}`: not UTF-8", input.display()); return $not_utf8; } Ok(Some(s)) => s } } } } /// Separate any lines at the start of the file that begin with `%`. fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) { let mut metadata = Vec::new(); for line in s.lines() { if line.starts_with("%") { // remove %<whitespace> metadata.push(line.slice_from(1).trim_left()) } else { let line_start_byte = s.subslice_offset(line); return (metadata, s.slice_from(line_start_byte)); } } // if we're here, then all lines were metadata % lines. (metadata, "") } fn load_external_files(names: &[~str]) -> Option<~str> { let mut out = StrBuf::new(); for name in names.iter() {<|fim▁hole|> Some(out.into_owned()) } /// Render `input` (e.g. "foo.md") into an HTML file in `output` /// (e.g. output = "bar" => "bar/foo.html"). 
pub fn render(input: &str, mut output: Path, matches: &getopts::Matches) -> int { let input_p = Path::new(input); output.push(input_p.filestem().unwrap()); output.set_extension("html"); let mut css = StrBuf::new(); for name in matches.opt_strs("markdown-css").iter() { let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name); css.push_str(s) } let input_str = load_or_return!(input, 1, 2); let (in_header, before_content, after_content) = match (load_external_files(matches.opt_strs("markdown-in-header") .as_slice()), load_external_files(matches.opt_strs("markdown-before-content") .as_slice()), load_external_files(matches.opt_strs("markdown-after-content") .as_slice())) { (Some(a), Some(b), Some(c)) => (a,b,c), _ => return 3 }; let mut out = match io::File::create(&output) { Err(e) => { let _ = writeln!(&mut io::stderr(), "error opening `{}` for writing: {}", output.display(), e); return 4; } Ok(f) => f }; let (metadata, text) = extract_leading_metadata(input_str); if metadata.len() == 0 { let _ = writeln!(&mut io::stderr(), "invalid markdown file: expecting initial line with `% ...TITLE...`"); return 5; } let title = metadata.get(0).as_slice(); reset_headers(); let err = write!( &mut out, r#"<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="generator" content="rustdoc"> <title>{title}</title> {css} {in_header} </head> <body> <!--[if lte IE 8]> <div class="warning"> This old browser is unsupported and will most likely display funky things. </div> <![endif]--> {before_content} <h1 class="title">{title}</h1> {text} {after_content} </body> </html>"#, title = Escape(title), css = css, in_header = in_header, before_content = before_content, text = MarkdownWithToc(text), after_content = after_content); match err { Err(e) => { let _ = writeln!(&mut io::stderr(), "error writing to `{}`: {}", output.display(), e); 6 } Ok(_) => 0 } } /// Run any tests/code examples in the markdown file `input`. pub fn test(input: &str, libs: HashSet<Path>, mut test_args: Vec<~str>) -> int { let input_str = load_or_return!(input, 1, 2); let mut collector = Collector::new(input.to_owned(), libs, true, true); find_testable_code(input_str, &mut collector); test_args.unshift("rustdoctest".to_owned()); testing::test_main(test_args.as_slice(), collector.tests); 0 }<|fim▁end|>
out.push_str(load_or_return!(name.as_slice(), None, None)); out.push_char('\n'); }
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>#![allow(unused_variables)] // extern crate bindgen; extern crate make_cmd; use make_cmd::make; use std::env; use std::path::Path; use std::process::Command; const LIBSIXEL_DIR: &str = "libsixel"; fn main() { let testing_build = false; let out_dir = env::var("OUT_DIR").unwrap(); let out_dir = Path::new(&out_dir); println!("cargo:rustc-link-lib=dylib=sixel"); // println!("cargo:rustc-link-lib=static=sixel");<|fim▁hole|> if testing_build { return; } let curl = has_feature("curl"); let jpeg = has_feature("jpeg"); let pixbuf = has_feature("pixbuf"); let png = has_feature("png"); let gd = has_feature("gd"); let python_interface = has_feature("python_interface"); let sixel_dir = Path::new(LIBSIXEL_DIR); { let mut cmd = Command::new("./configure"); cmd.current_dir(sixel_dir) .arg("--prefix") .arg(out_dir); // cmd.arg("-fPIC"); if curl { cmd.arg("--with-libcurl"); } if gd { cmd.arg("--with-gd"); } if pixbuf { cmd.arg("--with-gdk-pixbuf"); } if jpeg { cmd.arg("--with-jpeg"); } if png { cmd.arg("--with-png"); } if !python_interface { cmd.arg("--without-python"); } cmd.status().expect("Failed to execute ./configure"); make() .arg("install") .current_dir(sixel_dir) .status().expect("Failed to execute make"); } // generate_bindings(out_dir); } // fn generate_bindings(out_dir: &Path) { // let bindings = bindgen::Builder::default() // .no_unstable_rust() // .header("wrapper.h") // .hide_type("max_align_t") // .generate() // .expect("Unable to generate bindings"); // // bindings // .write_to_file(out_dir.join("bindings.rs")) // .expect("Couldn't write bindings"); // } const FEATURE_PREFIX: &str = "CARGO_FEATURE_"; fn has_feature(feature: &'static str) -> bool { let feature = feature.to_owned().to_uppercase(); let mut name = FEATURE_PREFIX.to_owned(); name.push_str(&feature); env::var(name).is_ok() }<|fim▁end|>
println!("cargo:rustc-link-search=native={}", out_dir.join("lib").display());
<|file_name|>NewsListModel.java<|end_file_name|><|fim▁begin|>package me.knox.zmz.mvp.model; import io.reactivex.Flowable; import io.reactivex.android.schedulers.AndroidSchedulers; import java.util.List; import javax.inject.Inject; import me.knox.zmz.App; import me.knox.zmz.entity.News; import me.knox.zmz.mvp.contract.NewsListContract; import me.knox.zmz.network.JsonResponse; /**<|fim▁hole|> @Inject public NewsListModel() { // empty constructor for injection } @Override public Flowable<JsonResponse<List<News>>> getNewsList() { return App.getInstance().getApi().getNews().observeOn(AndroidSchedulers.mainThread(), true); } }<|fim▁end|>
* Created by KNOX. */ public class NewsListModel implements NewsListContract.Model {
<|file_name|>validate_urls.py<|end_file_name|><|fim▁begin|>import logging

from django.core.management.base import BaseCommand

from citation.ping_urls import verify_url_status

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    help = '''Checks whether the archived code URLs are active and working '''

    def handle(self, *args, **options):
        verify_url_status()
<|fim▁hole|><|fim▁end|>
logger.debug("Validation completed")
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate cgmath; #[macro_use] extern crate puck_core; extern crate alto; extern crate lewton; extern crate time; extern crate notify; extern crate rand; extern crate image; #[macro_use] extern crate gfx; extern crate gfx_device_gl; extern crate gfx_window_glutin; extern crate glutin; extern crate rayon; extern crate serde; #[macro_use] extern crate serde_derive; extern crate multimap; pub mod audio; pub mod render; pub mod app; pub mod input; pub mod dimensions; pub mod camera; pub mod resources; pub use input::*; pub use camera::*; pub use dimensions::*;<|fim▁hole|> pub type PuckResult<T> = Result<T, PuckError>; #[derive(Debug)] pub enum PuckError { IO(io::Error), FileDoesntExist(PathBuf), PipelineError(gfx::PipelineStateError<String>), CombinedGFXError(gfx::CombinedError), ContextError(glutin::ContextError), NoTexture(), NoPipeline(), BufferCreationError(gfx::buffer::CreationError), TextureCreationError(gfx::texture::CreationError), ResourceViewError(gfx::ResourceViewError), // FontLoadError(FontLoadError), ImageError(image::ImageError), MustLoadTextureBeforeFont, NoFiles, MismatchingDimensions, // path buf, expectation RenderingPipelineIncomplete, } impl From<image::ImageError> for PuckError { fn from(err: image::ImageError) -> Self { PuckError::ImageError(err) } } impl From<io::Error> for PuckError { fn from(val: io::Error) -> PuckError { PuckError::IO(val) } } #[derive(Copy, Clone)] pub struct RenderTick { pub n: u64, pub accu_alpha: f64, // percentage of a frame that has accumulated pub tick_rate: u64, // per second }<|fim▁end|>
pub use resources::*; use std::io; use std::path::PathBuf;
<|file_name|>kv.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2019 The Decred developers // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. package lru import ( "container/list" "sync" ) // kv represents a key-value pair. type kv struct { key interface{} value interface{} } // KVCache provides a concurrency safe least-recently-used key/value cache with // nearly O(1) lookups, inserts, and deletions. The cache is limited to a // maximum number of items with eviction for the oldest entry when the // limit is exceeded. // // The NewKVCache function must be used to create a usable cache since the zero // value of this struct is not valid. type KVCache struct { mtx sync.Mutex cache map[interface{}]*list.Element // nearly O(1) lookups list *list.List // O(1) insert, update, delete limit uint } // Lookup returns the associated value of the passed key, if it is a member of // the cache. Looking up an existing item makes it the most recently used item. // // This function is safe for concurrent access. func (m *KVCache) Lookup(key interface{}) (interface{}, bool) { var value interface{} m.mtx.Lock() node, exists := m.cache[key] if exists { m.list.MoveToFront(node) pair := node.Value.(*kv) value = pair.value } m.mtx.Unlock() return value, exists } // Contains returns whether or not the passed key is a member of the cache. // The associated item of the passed key if it exists becomes the most // recently used item. // // This function is safe for concurrent access. func (m *KVCache) Contains(key interface{}) bool { m.mtx.Lock() node, exists := m.cache[key] if exists { m.list.MoveToFront(node) } m.mtx.Unlock() return exists } // Add adds the passed k/v to the cache and handles eviction of the oldest pair // if adding the new pair would exceed the max limit. Adding an existing pair // makes it the most recently used item. // // This function is safe for concurrent access. func (m *KVCache) Add(key interface{}, value interface{}) { m.mtx.Lock() defer m.mtx.Unlock() <|fim▁hole|> // When the limit is zero, nothing can be added to the cache, so just // return. if m.limit == 0 { return } // When the k/v already exists update the value and move it to the // front of the list thereby marking it most recently used. if node, exists := m.cache[key]; exists { node.Value.(*kv).value = value m.list.MoveToFront(node) m.cache[key] = node return } // Evict the least recently used k/v (back of the list) if the new // k/v would exceed the size limit for the cache. Also reuse the list // node so a new one doesn't have to be allocated. if uint(len(m.cache))+1 > m.limit { node := m.list.Back() lru := node.Value.(*kv) // Evict least recently used k/v. delete(m.cache, lru.key) // Reuse the list node of the k/v that was just evicted for the new // k/v. lru.key = key lru.value = value m.list.MoveToFront(node) m.cache[key] = node return } // The limit hasn't been reached yet, so just add the new k/v. node := m.list.PushFront(&kv{key: key, value: value}) m.cache[key] = node } // Delete deletes the k/v associated with passed key from the cache // (if it exists). // // This function is safe for concurrent access. func (m *KVCache) Delete(key interface{}) { m.mtx.Lock() if node, exists := m.cache[key]; exists { m.list.Remove(node) delete(m.cache, key) } m.mtx.Unlock() } // NewKVCache returns an initialized and empty KV LRU cache. // See the documentation for KV for more details. 
func NewKVCache(limit uint) KVCache { return KVCache{ cache: make(map[interface{}]*list.Element), list: list.New(), limit: limit, } }<|fim▁end|>
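The map-plus-doubly-linked-list layout above is what makes lookup, insert, and eviction all O(1). The same policy can be sketched in Python with collections.OrderedDict, which keeps entries in usage order and supports move_to_end:

from collections import OrderedDict

class LRUCache:
    def __init__(self, limit: int):
        self.limit = limit
        self.data = OrderedDict()  # key -> value, most recently used at the end

    def lookup(self, key):
        if key not in self.data:
            return None, False
        self.data.move_to_end(key)  # mark as most recently used
        return self.data[key], True

    def add(self, key, value):
        if self.limit == 0:
            return  # a zero-limit cache can never hold anything
        if key in self.data:
            self.data.move_to_end(key)
        elif len(self.data) >= self.limit:
            self.data.popitem(last=False)  # evict the least recently used entry
        self.data[key] = value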
<|file_name|>custom.js<|end_file_name|><|fim▁begin|>/** * Template Name: Daily Shop * Version: 1.1 * Template Scripts * Author: MarkUps * Author URI: http://www.markups.io/ Custom JS 1. CARTBOX 2. TOOLTIP 3. PRODUCT VIEW SLIDER 4. POPULAR PRODUCT SLIDER (SLICK SLIDER) 5. FEATURED PRODUCT SLIDER (SLICK SLIDER) 6. LATEST PRODUCT SLIDER (SLICK SLIDER) 7. TESTIMONIAL SLIDER (SLICK SLIDER) 8. CLIENT BRAND SLIDER (SLICK SLIDER) 9. PRICE SLIDER (noUiSlider SLIDER) 10. SCROLL TOP BUTTON 11. PRELOADER 12. GRID AND LIST LAYOUT CHANGER 13. RELATED ITEM SLIDER (SLICK SLIDER) 14. TOP SLIDER (SLICK SLIDER) **/ jQuery(function($){ /* ----------------------------------------------------------- */ /* 1. CARTBOX /* ----------------------------------------------------------- */ jQuery(".aa-cartbox").hover(function(){ jQuery(this).find(".aa-cartbox-summary").fadeIn(500); } ,function(){ jQuery(this).find(".aa-cartbox-summary").fadeOut(500); } ); /* ----------------------------------------------------------- */ /* 2. TOOLTIP /* ----------------------------------------------------------- */ jQuery('[data-toggle="tooltip"]').tooltip(); jQuery('[data-toggle2="tooltip"]').tooltip(); /* ----------------------------------------------------------- */ /* 3. PRODUCT VIEW SLIDER /* ----------------------------------------------------------- */ jQuery('#demo-1 .simpleLens-thumbnails-container img').simpleGallery({ loading_image: 'img/view-slider/loading.gif' }); jQuery('#demo-1 .simpleLens-big-image').simpleLens({ loading_image: 'img/view-slider/loading.gif' }); /* ----------------------------------------------------------- */ /* 4. POPULAR PRODUCT SLIDER (SLICK SLIDER) /* ----------------------------------------------------------- */ jQuery('.aa-popular-slider').slick({ dots: false, infinite: false, speed: 300, slidesToShow: 4, slidesToScroll: 4, responsive: [ { breakpoint: 1024, settings: { slidesToShow: 3, slidesToScroll: 3, infinite: true, dots: true } }, { breakpoint: 600, settings: { slidesToShow: 2, slidesToScroll: 2 } }, { breakpoint: 480, settings: { slidesToShow: 1, slidesToScroll: 1<|fim▁hole|> // settings: "unslick" // instead of a settings object ] }); /* ----------------------------------------------------------- */ /* 5. FEATURED PRODUCT SLIDER (SLICK SLIDER) /* ----------------------------------------------------------- */ jQuery('.aa-featured-slider').slick({ dots: false, infinite: false, speed: 300, slidesToShow: 4, slidesToScroll: 4, responsive: [ { breakpoint: 1024, settings: { slidesToShow: 3, slidesToScroll: 3, infinite: true, dots: true } }, { breakpoint: 600, settings: { slidesToShow: 2, slidesToScroll: 2 } }, { breakpoint: 480, settings: { slidesToShow: 1, slidesToScroll: 1 } } // You can unslick at a given breakpoint now by adding: // settings: "unslick" // instead of a settings object ] }); /* ----------------------------------------------------------- */ /* 6. 
LATEST PRODUCT SLIDER (SLICK SLIDER) /* ----------------------------------------------------------- */ jQuery('.aa-latest-slider').slick({ dots: false, infinite: false, speed: 300, slidesToShow: 4, slidesToScroll: 4, responsive: [ { breakpoint: 1024, settings: { slidesToShow: 3, slidesToScroll: 3, infinite: true, dots: true } }, { breakpoint: 600, settings: { slidesToShow: 2, slidesToScroll: 2 } }, { breakpoint: 480, settings: { slidesToShow: 1, slidesToScroll: 1 } } // You can unslick at a given breakpoint now by adding: // settings: "unslick" // instead of a settings object ] }); /* ----------------------------------------------------------- */ /* 7. TESTIMONIAL SLIDER (SLICK SLIDER) /* ----------------------------------------------------------- */ jQuery('.aa-testimonial-slider').slick({ dots: true, infinite: true, arrows: false, speed: 300, slidesToShow: 1, adaptiveHeight: true }); /* ----------------------------------------------------------- */ /* 8. CLIENT BRAND SLIDER (SLICK SLIDER) /* ----------------------------------------------------------- */ jQuery('.aa-client-brand-slider').slick({ dots: false, infinite: false, speed: 300, autoplay: true, autoplaySpeed: 2000, slidesToShow: 5, slidesToScroll: 1, responsive: [ { breakpoint: 1024, settings: { slidesToShow: 4, slidesToScroll: 4, infinite: true, dots: true } }, { breakpoint: 600, settings: { slidesToShow: 2, slidesToScroll: 2 } }, { breakpoint: 480, settings: { slidesToShow: 1, slidesToScroll: 1 } } // You can unslick at a given breakpoint now by adding: // settings: "unslick" // instead of a settings object ] }); /* ----------------------------------------------------------- */ /* 9. PRICE SLIDER (noUiSlider SLIDER) /* ----------------------------------------------------------- */ jQuery(function(){ if($('body').is('.productPage')){ var skipSlider = document.getElementById('skipstep'); noUiSlider.create(skipSlider, { range: { 'min': 0, '10%': 10, '20%': 20, '30%': 30, '40%': 40, '50%': 50, '60%': 60, '70%': 70, '80%': 80, '90%': 90, 'max': 100 }, snap: true, connect: true, start: [20, 70] }); // for value print var skipValues = [ document.getElementById('skip-value-lower'), document.getElementById('skip-value-upper') ]; skipSlider.noUiSlider.on('update', function( values, handle ) { skipValues[handle].innerHTML = values[handle]; }); } }); /* ----------------------------------------------------------- */ /* 10. SCROLL TOP BUTTON /* ----------------------------------------------------------- */ //Check to see if the window is top if not then display button jQuery(window).scroll(function(){ if ($(this).scrollTop() > 300) { $('.scrollToTop').fadeIn(); } else { $('.scrollToTop').fadeOut(); } }); //Click event to scroll to top jQuery('.scrollToTop').click(function(){ $('html, body').animate({scrollTop : 0},800); return false; }); /* ----------------------------------------------------------- */ /* 11. PRELOADER /* ----------------------------------------------------------- */ jQuery(window).load(function() { // makes sure the whole site is loaded jQuery('#wpf-loader-two').delay(200).fadeOut('slow'); // will fade out }) /* ----------------------------------------------------------- */ /* 12. 
GRID AND LIST LAYOUT CHANGER /* ----------------------------------------------------------- */ jQuery("#list-catg").click(function(e){ e.preventDefault(e); jQuery(".aa-product-catg").addClass("list"); }); jQuery("#grid-catg").click(function(e){ e.preventDefault(e); jQuery(".aa-product-catg").removeClass("list"); }); /* ----------------------------------------------------------- */ /* 13. RELATED ITEM SLIDER (SLICK SLIDER) /* ----------------------------------------------------------- */ jQuery('.aa-related-item-slider').slick({ dots: false, infinite: false, speed: 300, slidesToShow: 4, slidesToScroll: 4, responsive: [ { breakpoint: 1024, settings: { slidesToShow: 3, slidesToScroll: 3, infinite: true, dots: true } }, { breakpoint: 600, settings: { slidesToShow: 2, slidesToScroll: 2 } }, { breakpoint: 480, settings: { slidesToShow: 1, slidesToScroll: 1 } } // You can unslick at a given breakpoint now by adding: // settings: "unslick" // instead of a settings object ] }); /* ----------------------------------------------------------- */ /* 14. TOP SLIDER (SLICK SLIDER) /* ----------------------------------------------------------- */ jQuery('.seq-canvas').slick({ dots: false, infinite: true, speed: 500, fade: true, cssEase: 'linear' }); });<|fim▁end|>
} } // You can unslick at a given breakpoint now by adding:
<|file_name|>clients.rs<|end_file_name|><|fim▁begin|>#![feature(proc_macro, plugin, custom_attribute, custom_derive, plugin)]
#![plugin(serde_derive)]

extern crate tin_can_telephone as tct;

use std::net::{TcpStream as StdTcpStream, SocketAddr};
use std::io::{ //Read,
Write};

#[test]
fn login() {<|fim▁hole|>
        Err(_) => panic!("Error, server not available, \
            try running 'cargo run --example server'"),
    };
    let mut tcp_b = match StdTcpStream::connect(&addr) {
        Ok(stream) => stream,
        Err(_) => panic!("Error, server not available, \
            try running 'cargo run --example server'"),
    };

    let login_a = "\"LoginCredentials\"{\"user\":\"login_a\"}\n".as_bytes();
    let login_b = "\"LoginCredentials\"{\"user\":\"login_b\"}\n".as_bytes();

    tcp_a.write(login_a).unwrap();
    tcp_a.flush().unwrap();
    tcp_b.write(login_b).unwrap();
    tcp_b.flush().unwrap();

    println!("Verify logins via server log, this point means successful connection");
}<|fim▁end|>
let addr = "127.0.0.1:3000".to_string().parse::<SocketAddr>().unwrap(); let mut tcp_a = match StdTcpStream::connect(&addr) { Ok(stream) => stream,
<|file_name|>findBreaksVCF.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import re import sys import os import getopt import vcf def main(): params = parseArgs() vfh = vcf.Reader(open(params.vcf, 'r')) #grab contig sizes contigs = dict() for c,s in vfh.contigs.items(): contigs[s.id] = s.length regions = list() this_chrom = None start = int() stop = int() count = 0 for rec in vfh: if not this_chrom: this_chrom = rec.CHROM start = 1 stop = 1 count = 0 #If we entered new chromosome, submit old break elif this_chrom != rec.CHROM: t = tuple([this_chrom, start, contigs[this_chrom]]) regions.append(t) this_chrom = rec.CHROM start = 1 stop = 1 count = 0 #if this SNP is parsimony-informative if rec.is_snp and not rec.is_monomorphic: #Check if parsimony-informative if is_PIS(rec): count+=1 #if this is the final PIS, submit region to list if count == params.force: stop = rec.POS t = tuple([this_chrom, start, stop]) regions.append(t) start = stop + 1 count = 0 t = tuple([this_chrom, start, contigs[this_chrom]]) regions.append(t) print("Writing regions to out.regions...") write_regions("out.regions", regions) #Function to write list of regions tuples, in GATK format def write_regions(f, r): with open(f, 'w') as fh: try: for reg in r: ol = str(reg[0]) + ":" + str(reg[1]) + "-" + str(reg[2]) + "\n" fh.write(ol) except IOError as e: print("Could not read file %s: %s"%(f,e)) sys.exit(1) except Exception as e: print("Unexpected error reading file %s: %s"%(f,e)) sys.exit(1) finally: fh.close() #Function to check pyVCF record for if parsimony informative or not def is_PIS(r): ref=0 alt=0 for call in r.samples: if call.gt_type: if call.gt_type == 0: ref += 1 elif call.gt_type == 1: alt += 1 elif call.gt_type == 2: alt += 1 ref += 1 if ref >= 2 and alt >= 2: return(True) if ref <= 2 and alt <= 2: return(False) #Object to parse command-line arguments class parseArgs(): def __init__(self): #Define options try: options, remainder = getopt.getopt(sys.argv[1:], 'v:f:h', \ ["vcf=" "help", "force="]) except getopt.GetoptError as err: print(err) self.display_help("\nExiting because getopt returned non-zero exit status.") #Default values for params #Input params self.vcf=None self.force=100000 #First pass to see if help menu was called for o, a in options: if o in ("-h", "-help", "--help"): self.display_help("Exiting because help menu was called.") #Second pass to set all args. for opt, arg_raw in options: arg = arg_raw.replace(" ","") arg = arg.strip() opt = opt.replace("-","") #print(opt,arg) if opt in ('v', 'vcf'): self.vcf = arg elif opt in ('f','force'): self.force=int(arg) elif opt in ('h', 'help'): pass else: assert False, "Unhandled option %r"%opt #Check manditory options are set if not self.vcf: self.display_help("Must provide VCF file <-v,--vcf>") def display_help(self, message=None): if message is not None: print() print (message) print ("\nfindBreaksVCF.py\n") print ("Contact:Tyler K. Chafin, University of Arkansas,[email protected]") print ("\nUsage: ", sys.argv[0], "-v <input.vcf> -f <100000>\n") print ("Description: Breaks chromosomes into chunks of X parsimony-informative sites, for running MDL") print(""" Arguments: -v,--vcf : VCF file for parsing<|fim▁hole|> """) print() sys.exit() #Call main function if __name__ == '__main__': main()<|fim▁end|>
-f,--force : Number of PIS to force a break -h,--help : Displays help menu
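For reference, the definition the script relies on: a site is parsimony-informative when at least two distinct alleles are each present in at least two samples. A standalone sketch of that definition (independent of PyVCF's gt_type encoding, which the script tallies slightly differently):

from collections import Counter

def is_parsimony_informative(genotypes) -> bool:
    # genotypes: iterable of per-sample allele tuples, e.g. (0, 1) for a het call
    counts = Counter(allele for gt in genotypes for allele in gt)
    return sum(1 for n in counts.values() if n >= 2) >= 2

print(is_parsimony_informative([(0, 0), (0, 1), (1, 1)]))  # True: both alleles seen at least twice
print(is_parsimony_informative([(0, 0), (0, 0), (0, 1)]))  # False: allele 1 is seen only once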
<|file_name|>myparser.py<|end_file_name|><|fim▁begin|>import sys, argparse class MyParser(argparse.ArgumentParser): def error(self, message):<|fim▁hole|> sys.exit(2)<|fim▁end|>
'''Wraps error and prints in a shorter way''' sys.stderr.write('error: %s\n' % message) #self.print_help()
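Usage: subclassing ArgumentParser this way replaces argparse's default full error dump with a one-line message while keeping the conventional exit status 2 (assuming MyParser is imported from the module above):

parser = MyParser(description="demo")
parser.add_argument("--count", type=int, required=True)
parser.parse_args([])
# stderr: "error: the following arguments are required: --count"; the process exits with status 2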
<|file_name|>chrome_url_request_user_data.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/renderer_host/chrome_url_request_user_data.h" namespace { const char* const kKeyName = "chrome_url_request_user_data"; } // namespace ChromeURLRequestUserData::ChromeURLRequestUserData() : is_prerender_(false) { } // static ChromeURLRequestUserData* ChromeURLRequestUserData::Get( const net::URLRequest* request) { DCHECK(request); return static_cast<ChromeURLRequestUserData*>(request->GetUserData(kKeyName)); } // static ChromeURLRequestUserData* ChromeURLRequestUserData::Create( net::URLRequest* request) { DCHECK(request); DCHECK(!Get(request)); ChromeURLRequestUserData* user_data = new ChromeURLRequestUserData(); request->SetUserData(kKeyName, user_data);<|fim▁hole|>} // static void ChromeURLRequestUserData::Delete(net::URLRequest* request) { DCHECK(request); request->SetUserData(kKeyName, NULL); }<|fim▁end|>
return user_data;
<|file_name|>App.js<|end_file_name|><|fim▁begin|>import React from 'react'; import PropTypes from 'prop-types'; import Header from './Header'; // This is a class-based component because the current // version of hot reloading won't hot reload a stateless<|fim▁hole|> render() { return ( <div className="container"> <Header/> {this.props.children} </div> ); } } App.propTypes = { children: PropTypes.element }; export default App;<|fim▁end|>
// component at the top-level. class App extends React.Component {
<|file_name|>writehtml.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from troubleshooting.framework.modules.manager import ManagerFactory from troubleshooting.framework.variable.variable import * from troubleshooting.framework.libraries.baseList import list2stringAndFormat from troubleshooting.framework.libraries.system import createDir from troubleshooting.framework.modules.configuration import ConfigManagerInstance import time import os,sys from htmltemplate import * import re class html(object): def __init__(self): super(html,self).__init__() self.caseResult = ManagerFactory().getManager(LAYER.Case).case_record self.currenttime = time.strftime("%Y-%m-%d %X %Z",time.localtime()) def write(self): data = "" data += HTML_BEFORE data += HTML_HEAD data +=""" <body bgcolor = "#E9EAEE"> <h1 align="center">TroubleShooting Framework Report</h1> <p><i>%s</i></p> <table width="100%%" border="2" class="bordered"> <thead> <tr ><th width="15%%">CaseName</th><th width="5%%" >Status</th><th width="80%%">Attribute</th></tr> </thead> <tbody> """%(self.currenttime,) recovery_id = 1 for i,caseName in enumerate(self.caseResult): i += 1 caseStatus = self.caseResult[caseName]["STATUS"] DESCRIPTION = self.caseResult[caseName]["DESCRIPTION"] REFERENCE = self.caseResult[caseName]["REFERENCE"] REFERENCEHtml = '<a href="%s">reference document</>'%REFERENCE if REFERENCE else '<font color="#d0d0d0">NA</font>' TAGS = self.caseResult[caseName]["TAGS"] TESTPOINT = self.caseResult[caseName]["TESTPOINT"] parent_pass = """ <tr bgcolor="#53C579" class="parent" id="row_0%s"><td colspan="1">%s</td><td>PASS</td><td colspan="1"></td></tr>"""%(i,caseName,) parent_fail = """ <tr bgcolor="#FF3030" class="parent" id="row_0%s"><td colspan="1">%s</td><td>FAIL</td><td colspan="1"></td></tr>"""%(i,caseName,) parent_warn = """ <tr bgcolor="#FF7F00" class="parent" id="row_0%s"><td colspan="1">%s</td><td>WARN</td><td colspan="1"></td></tr>"""%(i,caseName,) if caseStatus: data += parent_pass else: _level = self.caseResult[caseName]["LEVEL"] if _level is LEVEL.CRITICAL: data += parent_fail else: data += parent_warn data += """ <tr class="child_row_0%s" style="display:none"><td>Description</td><td></td><td>%s</td></tr> <tr class="child_row_0%s" style="display:none"><td>Reference</td><td></td><td>%s</td></tr> <tr class="child_row_0%s" style="display:none"><td>Tags</td><td></td><td>%s</td></tr> """%(i,DESCRIPTION,i,REFERENCEHtml,i,TAGS) data += """ <tr class="child_row_0%s" style="display:none"> <td colspan="3" > <table border="1" width="100%%" style="margin:0px"> """%i data += """ <tr> <th width="5%%"> <b>TestPoint</b> </th> <th width="5%%"> <b>Status</b> </th> <th width="5%%"> <b>Level</b> </th> <th width="15%%" name="nolog"> <b>Impact</b> </th> <th width="35%%" name="nolog"> <b>Root Cause</b> </th> <th width="15%%" name="nolog"> <b>Fix Method</b> </th> <th width="20%%" name="nolog"> <b>Auto Fix Method</b> </th> <th style="display:none;" width="85%%" name="log"> <b>LOG</b> </th> </tr> """ for testpoint in TESTPOINT: testpointStatus = TESTPOINT[testpoint]["STATUS"] testpointStatusHtml = '<font color="green"><b><i>%s</i></b></font>' % STATUS.PASS.value.lower() if testpointStatus else '<font color="red"><b><i>%s</i></b></font>' % STATUS.FAIL.value.lower() testpointImpact = TESTPOINT[testpoint]["IMPACT"] testpointImpact = list2stringAndFormat(testpointImpact) if not testpointImpact: testpointImpact = '<font color="#d0d0d0">NA</font>' testpointImpactHtml = testpointImpact.replace("\n","</br>") testpointLevel = 
TESTPOINT[testpoint]["LEVEL"] testpointLevelHtml = testpointLevel.value testpointDescribe = TESTPOINT[testpoint]["DESCRIBE"] testpointRCA = TESTPOINT[testpoint]["RCA"] testpointRCA = list2stringAndFormat(testpointRCA) if not testpointRCA: testpointRCA = '<font color="#d0d0d0">NA</font>' testpointRCAHtml = testpointRCA.replace("\n","</br>") testpointFIXSTEP = TESTPOINT[testpoint]["FIXSTEP"] testpointFIXSTEP = list2stringAndFormat(testpointFIXSTEP) if not testpointFIXSTEP: testpointFIXSTEP = '<font color="#d0d0d0">NA</font>' testpointFIXSTEPHtml = testpointFIXSTEP.replace("\n","</br>") testpointAutoFixStep = TESTPOINT[testpoint]["AUTOFIXSTEP"] if not testpointAutoFixStep: testpointAutoFixStep = '<font color="#d0d0d0">NA</font>' else: if ConfigManagerInstance.config["Host"]: reportHash = ConfigManagerInstance.config["__ReportHash__"] reportName = ConfigManagerInstance.config["__ReportName__"] host = ConfigManagerInstance.config["Host"] port = ConfigManagerInstance.config["Port"] user = ConfigManagerInstance.config["User"] password = ConfigManagerInstance.config["Password"] cwd =ConfigManagerInstance.config["__ProjectCWD__"] recovery = {"ProjectDir":cwd,"Host":host,"Port":port,"User":user,"Password":password,"Recovery":",".join(testpointAutoFixStep)} testpointAutoFixStep = """ <iframe scrolling="no" src="/www/iframe/growl-genie.html?recovery=%s&reportHash=%s&reportName=%s"></iframe> """%(recovery,reportHash,reportName) testpointAutoFixStepHtml = testpointAutoFixStep testpointLog = TESTPOINT[testpoint]["LOG"] testpointLogHtml = testpointLog pattern = re.compile(r"\<.+\>") match = pattern.finditer(testpointLog) if match: for m in match: className = m.group() testpointLogHtml = testpointLogHtml.replace(className,'<font color="#FFB90F">%s</font>'%className) testpointLogHtml = testpointLogHtml.replace("\n", "</br>") testpointTimeout = TESTPOINT[testpoint]["TIMEOUT"] testpointCost = TESTPOINT[testpoint]["COST"] testpointHtml = '<i title="Timeout: %s\nCostTime: %s">%s<i>'%(testpointTimeout,testpointCost,testpoint.strip("{}")) attribute = """ <tr> <td> <i>%s</i> </td> <td> <i>%s</i> </td> <td> <i>%s</i> </td> <td name="nolog"> <i>%s</i> </td> <td name="nolog"> <i>%s</i> </td> <td name="nolog"> <i>%s</i> </td> <td name="nolog"> <i>%s</i> </td> <td style="display:none" name="log"> <i>%s</i> </td> </tr> """%(testpointHtml,testpointStatusHtml,testpointLevelHtml,testpointImpactHtml,testpointRCAHtml,testpointFIXSTEPHtml,testpointAutoFixStepHtml,testpointLogHtml) data += attribute data += """ </table> </td> </tr> """ data += """ </tbody> </table> """ data += BUTTON # data += HTML_LOG data += BODY_AFTER data += HTML_AFTER reportDir = os.path.dirname(ConfigManagerInstance.config["Report"]) createDir(reportDir)<|fim▁hole|> reportPath = ConfigManagerInstance.config["Report"] with open(reportPath,"w") as f: f.write(data)<|fim▁end|>
<|file_name|>irc-colors.ts<|end_file_name|><|fim▁begin|>declare module 'irc-colors' { function stripColorsAndStyle(data: string): string;<|fim▁hole|><|fim▁end|>
}
<|file_name|>listpages.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- r""" Print a list of pages, as defined by page generator parameters. Optionally, it also prints page content to STDOUT or save it to a file in the current directory. These parameters are supported to specify which pages titles to print: -format Defines the output format. Can be a custom string according to python string.format() notation or can be selected by a number from following list (1 is default format): 1 - u'{num:4d} {page.title}' --> 10 PageTitle 2 - u'{num:4d} [[{page.title}]]' --> 10 [[PageTitle]] 3 - u'{page.title}' --> PageTitle 4 - u'[[{page.title}]]' --> [[PageTitle]] 5 - u'{num:4d} \03{{lightred}}{page.loc_title:<40}\03{{default}}' --> 10 localised_Namespace:PageTitle (colorised in lightred) 6 - u'{num:4d} {page.loc_title:<40} {page.can_title:<40}' --> 10 localised_Namespace:PageTitle canonical_Namespace:PageTitle 7 - u'{num:4d} {page.loc_title:<40} {page.trs_title:<40}' --> 10 localised_Namespace:PageTitle outputlang_Namespace:PageTitle (*) requires "outputlang:lang" set. num is the sequential number of the listed page. An empty format is equal to -notitle and just shows the total amount of pages. -outputlang Language for translation of namespaces. -notitle Page title is not printed. -get Page content is printed. -save Save Page content to a file named as page.title(as_filename=True). Directory can be set with -save:dir_name If no dir is specified, current direcory will be used. -encode File encoding can be specified with '-encode:name' (name must be a valid python encoding: utf-8, etc.). If not specified, it defaults to config.textfile_encoding. -put: Save the list to the defined page of the wiki. By default it does not overwrite an exisiting page. -overwrite Overwrite the page if it exists. Can only by applied with -put. -summary: The summary text when the page is written. If it's one word just containing letters, dashes and underscores it uses that as a translation key. Custom format can be applied to the following items extrapolated from a page object: site: obtained from page._link._site. title: obtained from page._link._title. loc_title: obtained from page._link.canonical_title(). can_title: obtained from page._link.ns_title(). based either the canonical namespace name or on the namespace name in the language specified by the -trans param; a default value '******' will be used if no ns is found. onsite: obtained from pywikibot.Site(outputlang, self.site.family). trs_title: obtained from page._link.ns_title(onsite=onsite). If selected format requires trs_title, outputlang must be set. &params; """ # # (C) Pywikibot team, 2008-2017 # # Distributed under the terms of the MIT license. 
# from __future__ import absolute_import, unicode_literals import os import re import pywikibot from pywikibot import config2 as config, i18n from pywikibot.pagegenerators import GeneratorFactory, parameterHelp docuReplacements = {'&params;': parameterHelp} class Formatter(object): """Structure with Page attributes exposed for formatting from cmd line.""" fmt_options = { '1': u"{num:4d} {page.title}", '2': u"{num:4d} [[{page.title}]]", '3': u"{page.title}", '4': u"[[{page.title}]]", '5': u"{num:4d} \03{{lightred}}{page.loc_title:<40}\03{{default}}", '6': u"{num:4d} {page.loc_title:<40} {page.can_title:<40}", '7': u"{num:4d} {page.loc_title:<40} {page.trs_title:<40}", } # Identify which formats need outputlang fmt_need_lang = [k for k, v in fmt_options.items() if 'trs_title' in v] def __init__(self, page, outputlang=None, default='******'): """ Constructor. @param page: the page to be formatted. @type page: Page object. @param outputlang: language code in which namespace before title should be translated. Page ns will be searched in Site(outputlang, page.site.family) and, if found, its custom name will be used in page.title(). @type outputlang: str or None, if no translation is wanted. @param default: default string to be used if no corresponding namespace is found when outputlang is not None. """ self.site = page._link.site self.title = page._link.title self.loc_title = page._link.canonical_title() self.can_title = page._link.ns_title() self.outputlang = outputlang if outputlang is not None: # Cache onsite in case of translations. if not hasattr(self, "onsite"): self.onsite = pywikibot.Site(outputlang, self.site.family) try: self.trs_title = page._link.ns_title(onsite=self.onsite) # Fallback if no corresponding namespace is found in onsite. except pywikibot.Error: self.trs_title = u'%s:%s' % (default, page._link.title) def output(self, num=None, fmt=1): """Output formatted string.""" fmt = self.fmt_options.get(fmt, fmt) # If selected format requires trs_title, outputlang must be set. if (fmt in self.fmt_need_lang or 'trs_title' in fmt and self.outputlang is None): raise ValueError( u"Required format code needs 'outputlang' parameter set.") if num is None: return fmt.format(page=self) else: return fmt.format(num=num, page=self) def main(*args): """ Process command line arguments and invoke bot. If args is an empty list, sys.argv is used. @param args: command line arguments @type args: list of unicode """ gen = None notitle = False fmt = '1' outputlang = None page_get = False base_dir = None<|fim▁hole|> overwrite = False summary = 'listpages-save-list' # Process global args and prepare generator args parser local_args = pywikibot.handle_args(args) genFactory = GeneratorFactory() for arg in local_args: option, sep, value = arg.partition(':') if option == '-notitle': notitle = True elif option == '-format': fmt = value.replace('\\03{{', '\03{{') if not fmt.strip(): notitle = True elif option == '-outputlang:': outputlang = value elif option == '-get': page_get = True elif option == '-save': base_dir = value or '.' elif option == '-encode': encoding = value elif option == '-put': page_target = value elif option == '-overwrite': overwrite = True elif option == '-summary': summary = value else: genFactory.handleArg(arg) if base_dir: base_dir = os.path.expanduser(base_dir) if not os.path.isabs(base_dir): base_dir = os.path.normpath(os.path.join(os.getcwd(), base_dir)) if not os.path.exists(base_dir): pywikibot.output(u'Directory "%s" does not exist.' 
% base_dir) choice = pywikibot.input_yn( u'Do you want to create it ("No" to continue without saving)?') if choice: os.makedirs(base_dir, mode=0o744) else: base_dir = None elif not os.path.isdir(base_dir): # base_dir is a file. pywikibot.warning(u'Not a directory: "%s"\n' u'Skipping saving ...' % base_dir) base_dir = None if page_target: site = pywikibot.Site() page_target = pywikibot.Page(site, page_target) if not overwrite and page_target.exists(): pywikibot.bot.suggest_help( additional_text='Page {0} already exists.\n' 'You can use the -overwrite argument to ' 'replace the content of this page.' .format(page_target.title(asLink=True))) return False if re.match('^[a-z_-]+$', summary): summary = i18n.twtranslate(site, summary) gen = genFactory.getCombinedGenerator() if gen: i = 0 output_list = [] for i, page in enumerate(gen, start=1): if not notitle: page_fmt = Formatter(page, outputlang) output_list += [page_fmt.output(num=i, fmt=fmt)] pywikibot.stdout(output_list[-1]) if page_get: try: pywikibot.stdout(page.text) except pywikibot.Error as err: pywikibot.output(err) if base_dir: filename = os.path.join(base_dir, page.title(as_filename=True)) pywikibot.output(u'Saving %s to %s' % (page.title(), filename)) with open(filename, mode='wb') as f: f.write(page.text.encode(encoding)) pywikibot.output(u"%i page(s) found" % i) if page_target: page_target.text = '\n'.join(output_list) page_target.save(summary=summary) return True else: pywikibot.bot.suggest_help(missing_generator=True) return False if __name__ == "__main__": main()<|fim▁end|>
encoding = config.textfile_encoding page_target = None
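The -format options are plain str.format templates filled from the Formatter attributes; a standalone illustration of how option 1 renders, with a stand-in page object:

class Page:
    title = "Main Page"

print("{num:4d} {page.title}".format(num=10, page=Page()))
# ->   10 Main Page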
<|file_name|>callee.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use super::autoderef; use super::check_argument_types; use super::check_expr; use super::check_method_argument_types; use super::demand; use super::DeferredCallResolution; use super::err_args; use super::Expectation; use super::expected_types_for_fn_args; use super::FnCtxt; use super::LvaluePreference; use super::method; use super::structurally_resolved_type; use super::TupleArgumentsFlag; use super::UnresolvedTypeAction; use super::write_call; use CrateCtxt; use middle::infer; use middle::ty::{self, Ty, ClosureTyper}; use syntax::ast; use syntax::codemap::Span; use syntax::parse::token; use syntax::ptr::P; /// Check that it is legal to call methods of the trait corresponding /// to `trait_id` (this only cares about the trait, not the specific /// method that is called) pub fn check_legal_trait_for_method_call(ccx: &CrateCtxt, span: Span, trait_id: ast::DefId) { let tcx = ccx.tcx; let did = Some(trait_id); let li = &tcx.lang_items; if did == li.drop_trait() { span_err!(tcx.sess, span, E0040, "explicit use of destructor method"); } else if !tcx.sess.features.borrow().unboxed_closures { // the #[feature(unboxed_closures)] feature isn't // activated so we need to enforce the closure // restrictions. let method = if did == li.fn_trait() { "call" } else if did == li.fn_mut_trait() { "call_mut" } else if did == li.fn_once_trait() { "call_once" } else { return // not a closure method, everything is OK. 
}; span_err!(tcx.sess, span, E0174, "explicit use of unboxed closure method `{}` is experimental", method); fileline_help!(tcx.sess, span, "add `#![feature(unboxed_closures)]` to the crate attributes to enable"); } } pub fn check_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, call_expr: &'tcx ast::Expr, callee_expr: &'tcx ast::Expr, arg_exprs: &'tcx [P<ast::Expr>], expected: Expectation<'tcx>) { check_expr(fcx, callee_expr); let original_callee_ty = fcx.expr_ty(callee_expr); let (callee_ty, _, result) = autoderef(fcx, callee_expr.span, original_callee_ty, Some(callee_expr), UnresolvedTypeAction::Error, LvaluePreference::NoPreference, |adj_ty, idx| { try_overloaded_call_step(fcx, call_expr, callee_expr, adj_ty, idx) }); match result { None => { // this will report an error since original_callee_ty is not a fn confirm_builtin_call(fcx, call_expr, original_callee_ty, arg_exprs, expected); } Some(CallStep::Builtin) => { confirm_builtin_call(fcx, call_expr, callee_ty, arg_exprs, expected); } Some(CallStep::DeferredClosure(fn_sig)) => { confirm_deferred_closure_call(fcx, call_expr, arg_exprs, expected, fn_sig); } Some(CallStep::Overloaded(method_callee)) => { confirm_overloaded_call(fcx, call_expr, callee_expr, arg_exprs, expected, method_callee); } } } enum CallStep<'tcx> { Builtin, DeferredClosure(ty::FnSig<'tcx>), Overloaded(ty::MethodCallee<'tcx>) } fn try_overloaded_call_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, call_expr: &'tcx ast::Expr, callee_expr: &'tcx ast::Expr, adjusted_ty: Ty<'tcx>, autoderefs: usize) -> Option<CallStep<'tcx>> { debug!("try_overloaded_call_step(call_expr={:?}, adjusted_ty={:?}, autoderefs={})", call_expr, adjusted_ty, autoderefs); // If the callee is a bare function or a closure, then we're all set. match structurally_resolved_type(fcx, callee_expr.span, adjusted_ty).sty { ty::TyBareFn(..) => { fcx.write_autoderef_adjustment(callee_expr.id, autoderefs); return Some(CallStep::Builtin); } ty::TyClosure(def_id, substs) => { assert_eq!(def_id.krate, ast::LOCAL_CRATE); // Check whether this is a call to a closure where we // haven't yet decided on whether the closure is fn vs // fnmut vs fnonce. If so, we have to defer further processing. if fcx.closure_kind(def_id).is_none() { let closure_ty = fcx.closure_type(def_id, substs); let fn_sig = fcx.infcx().replace_late_bound_regions_with_fresh_var(call_expr.span, infer::FnCall, &closure_ty.sig).0; fcx.record_deferred_call_resolution(def_id, Box::new(CallResolution { call_expr: call_expr, callee_expr: callee_expr, adjusted_ty: adjusted_ty, autoderefs: autoderefs, fn_sig: fn_sig.clone(), closure_def_id: def_id })); return Some(CallStep::DeferredClosure(fn_sig)); } } // Hack: we know that there are traits implementing Fn for &F // where F:Fn and so forth. In the particular case of types // like `x: &mut FnMut()`, if there is a call `x()`, we would // normally translate to `FnMut::call_mut(&mut x, ())`, but // that winds up requiring `mut x: &mut FnMut()`. A little // over the top. The simplest fix by far is to just ignore // this case and deref again, so we wind up with // `FnMut::call_mut(&mut *x, ())`. ty::TyRef(..) 
if autoderefs == 0 => { return None; } _ => {} } try_overloaded_call_traits(fcx, call_expr, callee_expr, adjusted_ty, autoderefs) .map(|method_callee| CallStep::Overloaded(method_callee)) } fn try_overloaded_call_traits<'a,'tcx>(fcx: &FnCtxt<'a, 'tcx>, call_expr: &ast::Expr, callee_expr: &ast::Expr, adjusted_ty: Ty<'tcx>, autoderefs: usize) -> Option<ty::MethodCallee<'tcx>> { // Try the options that are least restrictive on the caller first. for &(opt_trait_def_id, method_name) in &[ (fcx.tcx().lang_items.fn_trait(), token::intern("call")), (fcx.tcx().lang_items.fn_mut_trait(), token::intern("call_mut")), (fcx.tcx().lang_items.fn_once_trait(), token::intern("call_once")), ] { let trait_def_id = match opt_trait_def_id { Some(def_id) => def_id, None => continue, }; match method::lookup_in_trait_adjusted(fcx, call_expr.span, Some(&*callee_expr), method_name, trait_def_id, autoderefs, false, adjusted_ty, None) { None => continue, Some(method_callee) => { return Some(method_callee); } } } None } fn confirm_builtin_call<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, call_expr: &ast::Expr, callee_ty: Ty<'tcx>, arg_exprs: &'tcx [P<ast::Expr>], expected: Expectation<'tcx>) { let error_fn_sig; let fn_sig = match callee_ty.sty { ty::TyBareFn(_, &ty::BareFnTy {ref sig, ..}) => { sig } _ => { fcx.type_error_message(call_expr.span, |actual| { format!("expected function, found `{}`", actual) }, callee_ty, None); // This is the "default" function signature, used in case of error. // In that case, we check each argument against "error" in order to // set up all the node type bindings. error_fn_sig = ty::Binder(ty::FnSig { inputs: err_args(fcx.tcx(), arg_exprs.len()), output: ty::FnConverging(fcx.tcx().types.err), variadic: false }); &error_fn_sig } }; // Replace any late-bound regions that appear in the function // signature with region variables. We also have to // renormalize the associated types at this point, since they // previously appeared within a `Binder<>` and hence would not // have been normalized before. let fn_sig = fcx.infcx().replace_late_bound_regions_with_fresh_var(call_expr.span, infer::FnCall, fn_sig).0; let fn_sig = fcx.normalize_associated_types_in(call_expr.span, &fn_sig); // Call the generic checker. let expected_arg_tys = expected_types_for_fn_args(fcx, call_expr.span, expected, fn_sig.output, &fn_sig.inputs); check_argument_types(fcx, call_expr.span, &fn_sig.inputs, &expected_arg_tys[..], arg_exprs, fn_sig.variadic, TupleArgumentsFlag::DontTupleArguments); write_call(fcx, call_expr, fn_sig.output);<|fim▁hole|>} fn confirm_deferred_closure_call<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, call_expr: &ast::Expr, arg_exprs: &'tcx [P<ast::Expr>], expected: Expectation<'tcx>, fn_sig: ty::FnSig<'tcx>) { // `fn_sig` is the *signature* of the cosure being called. We // don't know the full details yet (`Fn` vs `FnMut` etc), but we // do know the types expected for each argument and the return // type. 
let expected_arg_tys = expected_types_for_fn_args(fcx, call_expr.span, expected, fn_sig.output.clone(), &*fn_sig.inputs); check_argument_types(fcx, call_expr.span, &*fn_sig.inputs, &*expected_arg_tys, arg_exprs, fn_sig.variadic, TupleArgumentsFlag::TupleArguments); write_call(fcx, call_expr, fn_sig.output); } fn confirm_overloaded_call<'a,'tcx>(fcx: &FnCtxt<'a, 'tcx>, call_expr: &ast::Expr, callee_expr: &'tcx ast::Expr, arg_exprs: &'tcx [P<ast::Expr>], expected: Expectation<'tcx>, method_callee: ty::MethodCallee<'tcx>) { let output_type = check_method_argument_types(fcx, call_expr.span, method_callee.ty, callee_expr, arg_exprs, TupleArgumentsFlag::TupleArguments, expected); write_call(fcx, call_expr, output_type); write_overloaded_call_method_map(fcx, call_expr, method_callee); } fn write_overloaded_call_method_map<'a,'tcx>(fcx: &FnCtxt<'a, 'tcx>, call_expr: &ast::Expr, method_callee: ty::MethodCallee<'tcx>) { let method_call = ty::MethodCall::expr(call_expr.id); fcx.inh.method_map.borrow_mut().insert(method_call, method_callee); } #[derive(Debug)] struct CallResolution<'tcx> { call_expr: &'tcx ast::Expr, callee_expr: &'tcx ast::Expr, adjusted_ty: Ty<'tcx>, autoderefs: usize, fn_sig: ty::FnSig<'tcx>, closure_def_id: ast::DefId, } impl<'tcx> DeferredCallResolution<'tcx> for CallResolution<'tcx> { fn resolve<'a>(&mut self, fcx: &FnCtxt<'a,'tcx>) { debug!("DeferredCallResolution::resolve() {:?}", self); // we should not be invoked until the closure kind has been // determined by upvar inference assert!(fcx.closure_kind(self.closure_def_id).is_some()); // We may now know enough to figure out fn vs fnmut etc. match try_overloaded_call_traits(fcx, self.call_expr, self.callee_expr, self.adjusted_ty, self.autoderefs) { Some(method_callee) => { // One problem is that when we get here, we are going // to have a newly instantiated function signature // from the call trait. This has to be reconciled with // the older function signature we had before. In // principle we *should* be able to fn_sigs(), but we // can't because of the annoying need for a TypeTrace. // (This always bites me, should find a way to // refactor it.) let method_sig = ty::no_late_bound_regions(fcx.tcx(), ty::ty_fn_sig(method_callee.ty)).unwrap(); debug!("attempt_resolution: method_callee={:?}", method_callee); for (&method_arg_ty, &self_arg_ty) in method_sig.inputs[1..].iter().zip(&self.fn_sig.inputs) { demand::eqtype(fcx, self.call_expr.span, self_arg_ty, method_arg_ty); } let nilty = ty::mk_nil(fcx.tcx()); demand::eqtype(fcx, self.call_expr.span, method_sig.output.unwrap_or(nilty), self.fn_sig.output.unwrap_or(nilty)); write_overloaded_call_method_map(fcx, self.call_expr, method_callee); } None => { fcx.tcx().sess.span_bug( self.call_expr.span, "failed to find an overloaded call trait for closure call"); } } } }<|fim▁end|>
<|file_name|>test_avatar.py<|end_file_name|><|fim▁begin|># This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import defer from twisted.trial import unittest from buildbot.test.fake import httpclientservice as fakehttpclientservice from buildbot.test.util import www from buildbot.test.util.misc import TestReactorMixin from buildbot.www import auth from buildbot.www import avatar class TestAvatar(avatar.AvatarBase): def getUserAvatar(self, email, username, size, defaultAvatarUrl): return defer.succeed((b"image/png", '{!r} {!r} {!r}'.format( email, size, defaultAvatarUrl).encode('utf-8'))) class AvatarResource(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): def setUp(self): self.setUpTestReactor() @defer.inlineCallbacks def test_default(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/') self.assertEqual( res, dict(redirected=avatar.AvatarResource.defaultAvatarUrl)) @defer.inlineCallbacks def test_gravatar(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[avatar.AvatarGravatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo') self.assertEqual(res, dict(redirected=b'//www.gravatar.com/avatar/acbd18db4cc2f85ce' b'def654fccc4a4d8?d=retro&s=32')) @defer.inlineCallbacks def test_avatar_call(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[TestAvatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo') self.assertEqual(res, b"b'foo' 32 b'http://a/b/img/nobody.png'") @defer.inlineCallbacks def test_custom_size(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[TestAvatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo&size=64') self.assertEqual(res, b"b'foo' 64 b'http://a/b/img/nobody.png'") @defer.inlineCallbacks def test_invalid_size(self): master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[TestAvatar()]) rsrc = avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo&size=abcd') self.assertEqual(res, b"b'foo' 32 b'http://a/b/img/nobody.png'") @defer.inlineCallbacks def test_custom_not_found(self): # use gravatar if the custom avatar fail to return a response class CustomAvatar(avatar.AvatarBase): def getUserAvatar(self, email, username, size, defaultAvatarUrl): return defer.succeed(None) master = self.make_master(url=b'http://a/b/', auth=auth.NoAuth(), avatar_methods=[CustomAvatar(), avatar.AvatarGravatar()]) rsrc = 
avatar.AvatarResource(master) rsrc.reconfigResource(master.config) res = yield self.render_resource(rsrc, b'/?email=foo') self.assertEqual(res, dict(redirected=b'//www.gravatar.com/avatar/acbd18db4cc2f85ce'<|fim▁hole|>github_username_search_reply = { "login": "defunkt", "id": 42424242, "node_id": "MDQ6VXNlcjQyNDI0MjQy", "avatar_url": "https://avatars3.githubusercontent.com/u/42424242?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt", "html_url": "https://github.com/defunkt", "followers_url": "https://api.github.com/users/defunkt/followers", "following_url": "https://api.github.com/users/defunkt/following{/other_user}", "gists_url": "https://api.github.com/users/defunkt/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt/subscriptions", "organizations_url": "https://api.github.com/users/defunkt/orgs", "repos_url": "https://api.github.com/users/defunkt/repos", "events_url": "https://api.github.com/users/defunkt/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt/received_events", "type": "User", "site_admin": False, "name": "Defunkt User", "company": None, "blog": "", "location": None, "email": None, "hireable": None, "bio": None, "twitter_username": None, "public_repos": 1, "public_gists": 1, "followers": 1, "following": 1, "created_at": "2000-01-01T00:00:00Z", "updated_at": "2021-01-01T00:00:00Z" } github_username_not_found_reply = { "message": "Not Found", "documentation_url": "https://docs.github.com/rest/reference/users#get-a-user" } github_email_search_reply = { "total_count": 1, "incomplete_results": False, "items": [ { "login": "defunkt", "id": 42424242, "node_id": "MDQ6VXNlcjQyNDI0MjQy", "avatar_url": "https://avatars3.githubusercontent.com/u/42424242?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt", "html_url": "https://github.com/defunkt", "followers_url": "https://api.github.com/users/defunkt/followers", "following_url": "https://api.github.com/users/defunkt/following{/other_user}", "gists_url": "https://api.github.com/users/defunkt/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt/subscriptions", "organizations_url": "https://api.github.com/users/defunkt/orgs", "repos_url": "https://api.github.com/users/defunkt/repos", "events_url": "https://api.github.com/users/defunkt/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt/received_events", "type": "User", "site_admin": False, "score": 1.0 } ] } github_email_search_not_found_reply = { "total_count": 0, "incomplete_results": False, "items": [ ] } github_commit_search_reply = { "total_count": 1, "incomplete_results": False, "items": [ { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits/1111111111111111111111111111111111111111", "sha": "1111111111111111111111111111111111111111", "node_id": "MDY6Q29tbWl0NDM0MzQzNDM6MTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTEx", "html_url": "https://github.com/defunkt-org/defunkt-repo/" "commit/1111111111111111111111111111111111111111", "comments_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits/1111111111111111111111111111111111111111/comments", "commit": { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/commits/1111111111111111111111111111111111111111", "author": { "date": "2021-01-01T01:01:01.000-01:00", "name": "Defunkt 
User", "email": "[email protected]" }, "committer": { "date": "2021-01-01T01:01:01.000-01:00", "name": "Defunkt User", "email": "[email protected]" }, "message": "defunkt message", "tree": { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/trees/2222222222222222222222222222222222222222", "sha": "2222222222222222222222222222222222222222" }, "comment_count": 0 }, "author": { "login": "defunkt", "id": 42424242, "node_id": "MDQ6VXNlcjQyNDI0MjQy", "avatar_url": "https://avatars3.githubusercontent.com/u/42424242?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt", "html_url": "https://github.com/defunkt", "followers_url": "https://api.github.com/users/defunkt/followers", "following_url": "https://api.github.com/users/defunkt/following{/other_user}", "gists_url": "https://api.github.com/users/defunkt/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt/subscriptions", "organizations_url": "https://api.github.com/users/defunkt/orgs", "repos_url": "https://api.github.com/users/defunkt/repos", "events_url": "https://api.github.com/users/defunkt/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt/received_events", "type": "User", "site_admin": False }, "committer": { "login": "defunkt", "id": 42424242, "node_id": "MDQ6VXNlcjQyNDI0MjQy", "avatar_url": "https://avatars3.githubusercontent.com/u/42424242?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt", "html_url": "https://github.com/defunkt", "followers_url": "https://api.github.com/users/defunkt/followers", "following_url": "https://api.github.com/users/defunkt/following{/other_user}", "gists_url": "https://api.github.com/users/defunkt/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt/starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt/subscriptions", "organizations_url": "https://api.github.com/users/defunkt/orgs", "repos_url": "https://api.github.com/users/defunkt/repos", "events_url": "https://api.github.com/users/defunkt/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt/received_events", "type": "User", "site_admin": False }, "parents": [ { "url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits/3333333333333333333333333333333333333333", "html_url": "https://github.com/defunkt-org/defunkt-repo/" "commit/3333333333333333333333333333333333333333", "sha": "3333333333333333333333333333333333333333" } ], "repository": { "id": 43434343, "node_id": "MDEwOlJlcG9zaXRvcnk0MzQzNDM0Mw==", "name": "defunkt-repo", "full_name": "defunkt-org/defunkt-repo", "private": False, "owner": { "login": "defunkt-org", "id": 44444444, "node_id": "MDEyOk9yZ2FuaXphdGlvbjQ0NDQ0NDQ0", "avatar_url": "https://avatars2.githubusercontent.com/u/44444444?v=4", "gravatar_id": "", "url": "https://api.github.com/users/defunkt-org", "html_url": "https://github.com/defunkt-org", "followers_url": "https://api.github.com/users/defunkt-org/followers", "following_url": "https://api.github.com/users/defunkt-org/" "following{/other_user}", "gists_url": "https://api.github.com/users/defunkt-org/gists{/gist_id}", "starred_url": "https://api.github.com/users/defunkt-org/" "starred{/owner}{/repo}", "subscriptions_url": "https://api.github.com/users/defunkt-org/subscriptions", "organizations_url": "https://api.github.com/users/defunkt-org/orgs", "repos_url": "https://api.github.com/users/defunkt-org/repos", 
"events_url": "https://api.github.com/users/defunkt-org/events{/privacy}", "received_events_url": "https://api.github.com/users/defunkt-org/" "received_events", "type": "Organization", "site_admin": False }, "html_url": "https://github.com/defunkt-org/defunkt-repo", "description": "defunkt project", "fork": False, "url": "https://api.github.com/repos/defunkt-org/defunkt-repo", "forks_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/forks", "keys_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/keys{/key_id}", "collaborators_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "collaborators{/collaborator}", "teams_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/teams", "hooks_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/hooks", "issue_events_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "issues/events{/number}", "events_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/events", "assignees_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "assignees{/user}", "branches_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "branches{/branch}", "tags_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/tags", "blobs_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/blobs{/sha}", "git_tags_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/tags{/sha}", "git_refs_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/refs{/sha}", "trees_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/trees{/sha}", "statuses_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "statuses/{sha}", "languages_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "languages", "stargazers_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "stargazers", "contributors_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "contributors", "subscribers_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "subscribers", "subscription_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "subscription", "commits_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "commits{/sha}", "git_commits_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "git/commits{/sha}", "comments_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "comments{/number}", "issue_comment_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "issues/comments{/number}", "contents_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "contents/{+path}", "compare_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "compare/{base}...{head}", "merges_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/merges", "archive_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "{archive_format}{/ref}", "downloads_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "downloads", "issues_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "issues{/number}", "pulls_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "pulls{/number}", "milestones_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "milestones{/number}", "notifications_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "notifications{?since,all,participating}", "labels_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "labels{/name}", "releases_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" 
"releases{/id}", "deployments_url": "https://api.github.com/repos/defunkt-org/defunkt-repo/" "deployments" }, "score": 1.0 } ] } github_commit_search_not_found_reply = { "total_count": 0, "incomplete_results": False, "items": [ ] } class GitHubAvatar(TestReactorMixin, www.WwwTestMixin, unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.setUpTestReactor() master = self.make_master( url='http://a/b/', auth=auth.NoAuth(), avatar_methods=[avatar.AvatarGitHub(token="abcd")]) self.rsrc = avatar.AvatarResource(master) self.rsrc.reconfigResource(master.config) headers = { 'User-Agent': 'Buildbot', 'Authorization': 'token abcd', } self._http = yield fakehttpclientservice.HTTPClientService.getService( master, self, avatar.AvatarGitHub.DEFAULT_GITHUB_API_URL, headers=headers, debug=False, verify=False) yield self.master.startService() @defer.inlineCallbacks def tearDown(self): yield self.master.stopService() @defer.inlineCallbacks def test_username(self): username_search_endpoint = '/users/defunkt' self._http.expect('get', username_search_endpoint, content_json=github_username_search_reply, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?username=defunkt') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) @defer.inlineCallbacks def test_username_not_found(self): username_search_endpoint = '/users/inexistent' self._http.expect('get', username_search_endpoint, code=404, content_json=github_username_not_found_reply, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?username=inexistent') self.assertEqual(res, dict(redirected=b'img/nobody.png')) @defer.inlineCallbacks def test_username_error(self): username_search_endpoint = '/users/error' self._http.expect('get', username_search_endpoint, code=500, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?username=error') self.assertEqual(res, dict(redirected=b'img/nobody.png')) @defer.inlineCallbacks def test_username_cached(self): username_search_endpoint = '/users/defunkt' self._http.expect('get', username_search_endpoint, content_json=github_username_search_reply, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/?username=defunkt') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) # Second request will give same result but without an HTTP request res = yield self.render_resource(self.rsrc, b'/?username=defunkt') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) @defer.inlineCallbacks def test_email(self): email_search_endpoint = '/search/users?q=defunkt%40defunkt.com+in%3Aemail' self._http.expect('get', email_search_endpoint, content_json=github_email_search_reply, headers={'Accept': 'application/vnd.github.v3+json'}) res = yield self.render_resource(self.rsrc, b'/[email protected]') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) @defer.inlineCallbacks def test_email_commit(self): email_search_endpoint = '/search/users?q=defunkt%40defunkt.com+in%3Aemail' self._http.expect('get', email_search_endpoint, content_json=github_email_search_not_found_reply, headers={'Accept': 'application/vnd.github.v3+json'}) commit_search_endpoint = ('/search/commits?' 
'per_page=1&q=author-email%3Adefunkt%40defunkt.com&sort=committer-date') self._http.expect('get', commit_search_endpoint, content_json=github_commit_search_reply, headers={'Accept': 'application/vnd.github.v3+json,' 'application/vnd.github.cloak-preview'}) res = yield self.render_resource(self.rsrc, b'/[email protected]') self.assertEqual(res, dict(redirected=b'https://avatars3.githubusercontent.com/' b'u/42424242?v=4&s=32')) @defer.inlineCallbacks def test_email_not_found(self): email_search_endpoint = '/search/users?q=notfound%40defunkt.com+in%3Aemail' self._http.expect('get', email_search_endpoint, content_json=github_email_search_not_found_reply, headers={'Accept': 'application/vnd.github.v3+json'}) commit_search_endpoint = ('/search/commits?' 'per_page=1&q=author-email%3Anotfound%40defunkt.com&sort=committer-date') self._http.expect('get', commit_search_endpoint, content_json=github_commit_search_not_found_reply, headers={'Accept': 'application/vnd.github.v3+json,' 'application/vnd.github.cloak-preview'}) res = yield self.render_resource(self.rsrc, b'/[email protected]') self.assertEqual(res, dict(redirected=b'img/nobody.png')) @defer.inlineCallbacks def test_email_error(self): email_search_endpoint = '/search/users?q=error%40defunkt.com+in%3Aemail' self._http.expect('get', email_search_endpoint, code=500, headers={'Accept': 'application/vnd.github.v3+json'}) commit_search_endpoint = ('/search/commits?' 'per_page=1&q=author-email%3Aerror%40defunkt.com&sort=committer-date') self._http.expect('get', commit_search_endpoint, code=500, headers={'Accept': 'application/vnd.github.v3+json,' 'application/vnd.github.cloak-preview'}) res = yield self.render_resource(self.rsrc, b'/[email protected]') self.assertEqual(res, dict(redirected=b'img/nobody.png'))<|fim▁end|>
b'def654fccc4a4d8?d=retro&s=32'))
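The one-line completion above finishes the Gravatar URL that the hole marker split: the hex string is simply the MD5 of the email the test sends ("foo"), with d=retro selecting the generated fallback image and s=32 the pixel size. A quick stdlib check, assuming nothing beyond hashlib:

# Verifies the hash embedded in the buildbot test's expected redirect.
import hashlib

digest = hashlib.md5(b"foo").hexdigest()
assert digest == "acbd18db4cc2f85cedef654fccc4a4d8"
url = "//www.gravatar.com/avatar/" + digest + "?d=retro&s=32"
print(url)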
<|file_name|>board.rs<|end_file_name|><|fim▁begin|>extern crate image; use std::fs::File; use std::path::Path; pub struct Board { pub width: usize, pub height: usize, pub data: Vec<String> } impl Board {<|fim▁hole|> pub fn new(x: usize, y: usize) -> Board { Board { width: x, height: y, data: vec!["".to_string(); x*y] } } pub fn colour(&self, x: usize, y: usize) -> (u8,u8,u8) { let s = self.get(x, y); match s.as_ref() { "" => { (255,255,255) } "Y" => { (0,0,255) } "N" => { (255,0,0) } "?" => { (255,255,0) } "t" => { (0,0,0) } _ => { panic!("Unknown sigil: {}", s) } } } pub fn get(&self, x: usize, y: usize) -> String { self.data[ y*self.width + x ].clone() } pub fn set(&mut self, x: usize, y: usize, s: String) { self.data[ y*self.width + x ] = s.clone() } pub fn screenshot(&self, path: String) { let mut imgbuf = image::ImageBuffer::new( self.width as u32, self.height as u32 ); for x in 0 .. self.width { for y in 0 .. self.height { let (r,g,b) = self.colour(x, y); imgbuf.put_pixel(x as u32, y as u32, image::Rgb([ r, g, b ])); } } let ref mut fout = File::create(&Path::new( &path )).unwrap(); let _ = image::ImageRgb8(imgbuf).save(fout, image::PNG); } }<|fim▁end|>
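The board.rs record places its hole just inside `impl Board`; the surrounding methods map cell sigils to RGB triples and dump the grid as a PNG. The same lookup table, sketched in Python for reference; the function name is illustrative, and the colours are taken from the Rust match arms:

# Sigil -> RGB mapping mirroring Board::colour; unknown sigils raise,
# matching the panic! branch in the Rust original.
SIGIL_COLOURS = {
    "": (255, 255, 255),
    "Y": (0, 0, 255),
    "N": (255, 0, 0),
    "?": (255, 255, 0),
    "t": (0, 0, 0),
}

def colour(sigil: str) -> tuple:
    try:
        return SIGIL_COLOURS[sigil]
    except KeyError:
        raise ValueError("Unknown sigil: {!r}".format(sigil))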
<|file_name|>WSO2EventAdapter.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2005 - 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.wso2.carbon.event.output.adapter.wso2event; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.databridge.agent.AgentHolder; import org.wso2.carbon.databridge.agent.DataPublisher; import org.wso2.carbon.databridge.agent.exception.DataEndpointAgentConfigurationException; import org.wso2.carbon.databridge.agent.exception.DataEndpointAuthenticationException; import org.wso2.carbon.databridge.agent.exception.DataEndpointConfigurationException; import org.wso2.carbon.databridge.agent.exception.DataEndpointException; import org.wso2.carbon.databridge.commons.Event; import org.wso2.carbon.databridge.commons.exception.TransportException; import org.wso2.carbon.event.output.adapter.core.OutputEventAdapter; import org.wso2.carbon.event.output.adapter.core.OutputEventAdapterConfiguration; import org.wso2.carbon.event.output.adapter.core.exception.ConnectionUnavailableException; import org.wso2.carbon.event.output.adapter.core.exception.OutputEventAdapterRuntimeException; import org.wso2.carbon.event.output.adapter.core.exception.TestConnectionNotSupportedException; import org.wso2.carbon.event.output.adapter.wso2event.internal.util.WSO2EventAdapterConstants; import java.util.Map; import static org.wso2.carbon.event.output.adapter.wso2event.internal.util.WSO2EventAdapterConstants.*; public final class WSO2EventAdapter implements OutputEventAdapter { private static final Log log = LogFactory.getLog(WSO2EventAdapter.class); private final OutputEventAdapterConfiguration eventAdapterConfiguration; private final Map<String, String> globalProperties; private DataPublisher dataPublisher = null; private boolean isBlockingMode = false; private long timeout = 0; private String streamId; public WSO2EventAdapter(OutputEventAdapterConfiguration eventAdapterConfiguration, Map<String, String> globalProperties) { this.eventAdapterConfiguration = eventAdapterConfiguration; this.globalProperties = globalProperties; } /** * Initialises the resource bundle */ @Override public void init() { streamId = eventAdapterConfiguration.getStaticProperties().get( WSO2EventAdapterConstants.ADAPTER_STATIC_CONFIG_STREAM_NAME) + ":" + eventAdapterConfiguration.getStaticProperties().get(WSO2EventAdapterConstants .ADAPTER_STATIC_CONFIG_STREAM_VERSION); String configPath = globalProperties.get(ADAPTOR_CONF_PATH); if (configPath != null) { AgentHolder.setConfigPath(configPath); } } @Override public void testConnect() throws TestConnectionNotSupportedException { connect(); } @Override public synchronized void connect() { <|fim▁hole|> String userName = eventAdapterConfiguration.getStaticProperties().get(ADAPTER_CONF_WSO2EVENT_PROP_USER_NAME); String password = eventAdapterConfiguration.getStaticProperties().get(ADAPTER_CONF_WSO2EVENT_PROP_PASSWORD); String authUrl = 
eventAdapterConfiguration.getStaticProperties() .get(ADAPTER_CONF_WSO2EVENT_PROP_AUTHENTICATOR_URL); String receiverUrl = eventAdapterConfiguration.getStaticProperties() .get(ADAPTER_CONF_WSO2EVENT_PROP_RECEIVER_URL); String protocol = eventAdapterConfiguration.getStaticProperties().get(ADAPTER_CONF_WSO2EVENT_PROP_PROTOCOL); String publishingMode = eventAdapterConfiguration.getStaticProperties() .get(ADAPTER_CONF_WSO2EVENT_PROP_PUBLISHING_MODE); String timeoutString = eventAdapterConfiguration.getStaticProperties() .get(ADAPTER_CONF_WSO2EVENT_PROP_PUBLISH_TIMEOUT_MS); if (publishingMode.equalsIgnoreCase(ADAPTER_PUBLISHING_MODE_BLOCKING)) { isBlockingMode = true; } else { try { timeout = Long.parseLong(timeoutString); } catch (RuntimeException e) { throwRuntimeException(receiverUrl, authUrl, protocol, userName, e); } } try { if (authUrl != null && authUrl.length() > 0) { dataPublisher = new DataPublisher(protocol, receiverUrl, authUrl, userName, password); } else { dataPublisher = new DataPublisher(protocol, receiverUrl, null, userName, password); } } catch (DataEndpointAgentConfigurationException e) { throwRuntimeException(receiverUrl, authUrl, protocol, userName, e); } catch (DataEndpointException e) { throwConnectionException(receiverUrl, authUrl, protocol, userName, e); } catch (DataEndpointConfigurationException e) { throwRuntimeException(receiverUrl, authUrl, protocol, userName, e); } catch (DataEndpointAuthenticationException e) { throwConnectionException(receiverUrl, authUrl, protocol, userName, e); } catch (TransportException e) { throwConnectionException(receiverUrl, authUrl, protocol, userName, e); } } @Override public void publish(Object message, Map<String, String> dynamicProperties) { Event event = (Event) (message); //StreamDefinition streamDefinition = (StreamDefinition) ((Object[]) message)[1]; event.setStreamId(streamId); if (isBlockingMode) { dataPublisher.publish(event); } else { dataPublisher.tryPublish(event, timeout); } } @Override public void disconnect() { if (dataPublisher != null) { try { dataPublisher.shutdown(); } catch (DataEndpointException e) { String userName = eventAdapterConfiguration.getStaticProperties() .get(ADAPTER_CONF_WSO2EVENT_PROP_USER_NAME); String authUrl = eventAdapterConfiguration.getStaticProperties() .get(ADAPTER_CONF_WSO2EVENT_PROP_AUTHENTICATOR_URL); String receiverUrl = eventAdapterConfiguration.getStaticProperties() .get(ADAPTER_CONF_WSO2EVENT_PROP_RECEIVER_URL); String protocol = eventAdapterConfiguration.getStaticProperties() .get(ADAPTER_CONF_WSO2EVENT_PROP_PROTOCOL); logException("Error in shutting down the data publisher", receiverUrl, authUrl, protocol, userName, e); } } } @Override public void destroy() { } private void throwRuntimeException(String receiverUrl, String authUrl, String protocol, String userName, Exception e) { throw new OutputEventAdapterRuntimeException( "Error in data-bridge config for adaptor " + eventAdapterConfiguration.getName() + " with the receiverUrl:" + receiverUrl + " authUrl:" + authUrl + " protocol:" + protocol + " and userName:" + userName + "," + e.getMessage(), e); } private void logException(String message, String receiverUrl, String authUrl, String protocol, String userName, Exception e) { log.error(message + " for adaptor " + eventAdapterConfiguration.getName() + " with the receiverUrl:" + receiverUrl + " authUrl:" + authUrl + " protocol:" + protocol + " and userName:" + userName + "," + e.getMessage(), e); } private void throwConnectionException(String receiverUrl, String authUrl, String protocol, 
String userName, Exception e) { throw new ConnectionUnavailableException( "Connection not available for adaptor " + eventAdapterConfiguration.getName() + " with the receiverUrl:" + receiverUrl + " authUrl:" + authUrl + " protocol:" + protocol + " and userName:" + userName + "," + e.getMessage(), e); } }<|fim▁end|>
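In the Java record the elided connect() body reads endpoint settings and chooses between blocking and timed best-effort publishing, which publish() then branches on. A condensed sketch of that dispatch with a stand-in publisher object rather than the real databridge DataPublisher; the mode strings follow the record, everything else is illustrative:

# Mirrors the isBlockingMode branch in WSO2EventAdapter.publish.
class AdapterSketch:
    def __init__(self, publisher, publishing_mode: str, timeout_ms: int = 0):
        self.publisher = publisher
        self.is_blocking = publishing_mode.lower() == "blocking"  # equalsIgnoreCase
        self.timeout_ms = timeout_ms

    def publish(self, event):
        if self.is_blocking:
            self.publisher.publish(event)        # waits until the event is accepted
        else:
            self.publisher.try_publish(event, self.timeout_ms)  # drops on timeout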
<|file_name|>containermgmt.go<|end_file_name|><|fim▁begin|>/* Copyright 2015 Crunchy Data Solutions, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package adminapi import ( "github.com/ant0ine/go-json-rest/rest" "github.com/crunchydata/crunchy-postgresql-manager/admindb" "github.com/crunchydata/crunchy-postgresql-manager/cpmserverapi" "github.com/crunchydata/crunchy-postgresql-manager/logit" "github.com/crunchydata/crunchy-postgresql-manager/swarmapi" "github.com/crunchydata/crunchy-postgresql-manager/types" "github.com/crunchydata/crunchy-postgresql-manager/util" "net/http" "strings" ) const CONTAINER_NOT_FOUND = "CONTAINER NOT FOUND" // GetNode returns the container node definition func GetNode(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } ID := r.PathParam("ID") if ID == "" { logit.Error.Println("error node ID required") rest.Error(w, "node ID required", http.StatusBadRequest) return } node, err2 := admindb.GetContainer(dbConn, ID) if node.ID == "" { rest.NotFound(w, r) return } if err2 != nil { logit.Error.Println(err2.Error()) rest.Error(w, err2.Error(), http.StatusBadRequest) return } var currentStatus = "UNKNOWN" request := &swarmapi.DockerInspectRequest{} var inspectInfo swarmapi.DockerInspectResponse request.ContainerName = node.Name inspectInfo, err = swarmapi.DockerInspect(request) if err != nil { logit.Error.Println(err.Error()) currentStatus = CONTAINER_NOT_FOUND } if currentStatus != "CONTAINER NOT FOUND" { var pgport types.Setting pgport, err = admindb.GetSetting(dbConn, "PG-PORT") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } currentStatus, err = util.FastPing(pgport.Value, node.Name) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } //logit.Info.Println("pinging db finished") } clusternode := new(types.ClusterNode) clusternode.ID = node.ID clusternode.ClusterID = node.ClusterID clusternode.Name = node.Name clusternode.Role = node.Role clusternode.Image = node.Image clusternode.CreateDate = node.CreateDate clusternode.Status = currentStatus clusternode.ProjectID = node.ProjectID clusternode.ProjectName = node.ProjectName clusternode.ClusterName = node.ClusterName clusternode.ServerID = inspectInfo.ServerID clusternode.IPAddress = inspectInfo.IPAddress w.WriteJson(clusternode) } // GetAllNodesForProject returns all node definitions for a given project func GetAllNodesForProject(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil 
{ logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } ID := r.PathParam("ID") if ID == "" { logit.Error.Println("project ID required") rest.Error(w, "project ID required", http.StatusBadRequest) return } results, err := admindb.GetAllContainersForProject(dbConn, ID) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) } nodes := make([]types.ClusterNode, len(results)) i := 0 for i = range results { nodes[i].ID = results[i].ID nodes[i].Name = results[i].Name nodes[i].ClusterID = results[i].ClusterID nodes[i].Role = results[i].Role nodes[i].Image = results[i].Image nodes[i].CreateDate = results[i].CreateDate nodes[i].ProjectID = results[i].ProjectID nodes[i].ProjectName = results[i].ProjectName nodes[i].ClusterName = results[i].ClusterName //nodes[i].Status = "UNKNOWN" i++ } w.WriteJson(&nodes) } // TODO func GetAllNodes(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } results, err := admindb.GetAllContainers(dbConn) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) } nodes := make([]types.ClusterNode, len(results)) i := 0 for i = range results { nodes[i].ID = results[i].ID nodes[i].Name = results[i].Name nodes[i].ClusterID = results[i].ClusterID nodes[i].Role = results[i].Role nodes[i].Image = results[i].Image nodes[i].CreateDate = results[i].CreateDate nodes[i].ProjectID = results[i].ProjectID nodes[i].ProjectName = results[i].ProjectName nodes[i].ClusterName = results[i].ClusterName //nodes[i].Status = "UNKNOWN" i++ } w.WriteJson(&nodes) } // TODO func GetAllNodesNotInCluster(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } results, err := admindb.GetAllContainersNotInCluster(dbConn) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) } nodes := make([]types.ClusterNode, len(results)) i := 0 for i = range results { nodes[i].ID = results[i].ID nodes[i].Name = results[i].Name nodes[i].ClusterID = results[i].ClusterID nodes[i].Role = results[i].Role nodes[i].Image = results[i].Image nodes[i].CreateDate = results[i].CreateDate nodes[i].ProjectID = results[i].ProjectID nodes[i].ProjectName = results[i].ProjectName nodes[i].ClusterName = results[i].ClusterName //nodes[i].Status = "UNKNOWN" i++ } w.WriteJson(&nodes) } // GetAllNodesForCluster returns a list of nodes for a given cluster func GetAllNodesForCluster(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } ClusterID := r.PathParam("ClusterID") if ClusterID == "" { 
logit.Error.Println("ClusterID required") rest.Error(w, "node ClusterID required", http.StatusBadRequest) return } results, err := admindb.GetAllContainersForCluster(dbConn, ClusterID) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) } nodes := make([]types.ClusterNode, len(results)) i := 0 for i = range results { nodes[i].ID = results[i].ID nodes[i].Name = results[i].Name nodes[i].ClusterID = results[i].ClusterID nodes[i].Role = results[i].Role nodes[i].Image = results[i].Image nodes[i].CreateDate = results[i].CreateDate nodes[i].ProjectID = results[i].ProjectID nodes[i].ProjectName = results[i].ProjectName nodes[i].ClusterName = results[i].ClusterName //nodes[i].Status = "UNKNOWN" i++ } w.WriteJson(&nodes) } /* TODO refactor this to share code with DeleteCluster!!!!! */ func DeleteNode(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-container") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } ID := r.PathParam("ID") if ID == "" { logit.Error.Println("DeleteNode: error node ID required") rest.Error(w, "node ID required", http.StatusBadRequest) return } //go get the node we intend to delete var dbNode types.Container dbNode, err = admindb.GetContainer(dbConn, ID) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } var infoResponse swarmapi.DockerInfoResponse infoResponse, err = swarmapi.DockerInfo() if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusInternalServerError) return } servers := make([]types.Server, len(infoResponse.Output)) i := 0 for i = range infoResponse.Output { servers[i].ID = infoResponse.Output[i] servers[i].Name = infoResponse.Output[i] servers[i].IPAddress = infoResponse.Output[i] i++ } var pgdatapath types.Setting pgdatapath, err = admindb.GetSetting(dbConn, "PG-DATA-PATH") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } err = admindb.DeleteContainer(dbConn, ID) if err != nil {<|fim▁hole|> rest.Error(w, err.Error(), http.StatusBadRequest) return } logit.Info.Println("remove 1") //it is possible that someone can remove a container //outside of us, so we let it pass that we can't remove //it request := &swarmapi.DockerRemoveRequest{} request.ContainerName = dbNode.Name _, err = swarmapi.DockerRemove(request) if err != nil { logit.Error.Println(err.Error()) } logit.Info.Println("remove 2") //send the server a deletevolume command request2 := &cpmserverapi.DiskDeleteRequest{} request2.Path = pgdatapath.Value + "/" + dbNode.Name for _, each := range servers { _, err = cpmserverapi.DiskDeleteClient(each.Name, request2) if err != nil { logit.Error.Println(err.Error()) } } logit.Info.Println("remove 3") //we should not have to delete the DNS entries because //of the dnsbridge, it should remove them when we remove //the containers via the docker api w.WriteHeader(http.StatusOK) status := types.SimpleStatus{} status.Status = "OK" w.WriteJson(&status) } // GetAllNodesForServer returns a list of all nodes on a given server func GetAllNodesForServer(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, 
err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } serverID := r.PathParam("ServerID") if serverID == "" { logit.Error.Println("GetAllNodesForServer: error serverID required") rest.Error(w, "serverID required", http.StatusBadRequest) return } serverIPAddress := strings.Replace(serverID, "_", ".", -1) results, err := swarmapi.DockerPs(serverIPAddress) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } nodes := make([]types.ClusterNode, len(results.Output)) i := 0 var container types.Container for _, each := range results.Output { logit.Info.Println("got back Name:" + each.Name + " Status:" + each.Status + " Image:" + each.Image) nodes[i].Name = each.Name container, err = admindb.GetContainerByName(dbConn, each.Name) if err != nil { logit.Error.Println(err.Error()) nodes[i].ID = "unknown" nodes[i].ProjectID = "unknown" } else { nodes[i].ID = container.ID nodes[i].ProjectID = container.ProjectID } nodes[i].Status = each.Status nodes[i].Image = each.Image i++ } w.WriteJson(&nodes) } // AdminStartNode starts a container func AdminStartNode(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } ID := r.PathParam("ID") if ID == "" { logit.Error.Println("AdminStartNode: error ID required") rest.Error(w, "ID required", http.StatusBadRequest) return } node, err := admindb.GetContainer(dbConn, ID) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } /** server := types.Server{} server, err = admindb.GetServer(dbConn, node.ServerID) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } */ var response swarmapi.DockerStartResponse request := &swarmapi.DockerStartRequest{} request.ContainerName = node.Name response, err = swarmapi.DockerStart(request) if err != nil { logit.Error.Println(err.Error()) logit.Error.Println(response.Output) } //logit.Info.Println(response.Output) w.WriteHeader(http.StatusOK) status := types.SimpleStatus{} status.Status = "OK" w.WriteJson(&status) } // AdminStopNode stops a container func AdminStopNode(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } ID := r.PathParam("ID") if ID == "" { logit.Error.Println("AdminStopNode: error ID required") rest.Error(w, "ID required", http.StatusBadRequest) return } node, err := admindb.GetContainer(dbConn, ID) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } /** server := types.Server{} server, err = admindb.GetServer(dbConn, node.ServerID) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } */ request := &swarmapi.DockerStopRequest{} 
request.ContainerName = node.Name _, err = swarmapi.DockerStop(request) if err != nil { logit.Error.Println(err.Error()) } w.WriteHeader(http.StatusOK) status := types.SimpleStatus{} status.Status = "OK" w.WriteJson(&status) } // AdminStartServerContainers starts all containers on a given server func AdminStartServerContainers(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } //serverID serverid := r.PathParam("ID") if serverid == "" { logit.Error.Println(" error ID required") rest.Error(w, "ID required", http.StatusBadRequest) return } cleanIP := strings.Replace(serverid, "_", ".", -1) containers, err := swarmapi.DockerPs(cleanIP) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } //for each, get server, start container //use a 'best effort' approach here since containers //can be removed outside of CPM's control for _, each := range containers.Output { //start the container var response swarmapi.DockerStartResponse var err error request := &swarmapi.DockerStartRequest{} logit.Info.Println("trying to start " + each.Name) request.ContainerName = each.Name response, err = swarmapi.DockerStart(request) if err != nil { logit.Error.Println("AdminStartServerContainers: error when trying to start container " + err.Error()) logit.Error.Println(response.Output) } //logit.Info.Println(response.Output) } w.WriteHeader(http.StatusOK) status := types.SimpleStatus{} status.Status = "OK" w.WriteJson(&status) } // AdminStopServerContainers stops all containers on a given server func AdminStopServerContainers(w rest.ResponseWriter, r *rest.Request) { dbConn, err := util.GetConnection(CLUSTERADMIN_DB) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), 400) return } defer dbConn.Close() err = secimpl.Authorize(dbConn, r.PathParam("Token"), "perm-read") if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusUnauthorized) return } //serverID serverid := r.PathParam("ID") if serverid == "" { logit.Error.Println("AdminStopoServerContainers: error ID required") rest.Error(w, "ID required", http.StatusBadRequest) return } cleanIP := strings.Replace(serverid, "_", ".", -1) containers, err := swarmapi.DockerPs(cleanIP) if err != nil { logit.Error.Println(err.Error()) rest.Error(w, err.Error(), http.StatusBadRequest) return } //for each, get server, stop container for _, each := range containers.Output { if strings.HasPrefix(each.Status, "Up") { //stop container request := &swarmapi.DockerStopRequest{} request.ContainerName = each.Name logit.Info.Println("stopping " + request.ContainerName) _, err = swarmapi.DockerStop(request) if err != nil { logit.Error.Println("AdminStopServerContainers: error when trying to start container " + err.Error()) } } } w.WriteHeader(http.StatusOK) status := types.SimpleStatus{} status.Status = "OK" w.WriteJson(&status) }<|fim▁end|>
logit.Error.Println(err.Error())
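The single completion line above is the elided error log inside DeleteNode. One small idiom from the same Go file worth noting: handlers recover a server IP from a path parameter by swapping underscores for dots, via strings.Replace(serverID, "_", ".", -1). The equivalent transform as a sanity check:

# Same transform as the Go handlers' cleanIP computation.
def server_ip_from_param(server_id: str) -> str:
    return server_id.replace("_", ".")

assert server_ip_from_param("10_0_0_5") == "10.0.0.5"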
<|file_name|>definition_structure.hpp<|end_file_name|><|fim▁begin|>/*! * \file definition_structure.hpp * \brief Headers of the main subroutines used by SU2_EDU. * The subroutines and functions are in the <i>definition_structure.cpp</i> file. * \author Aerospace Design Laboratory (Stanford University). * \version 1.2.0 * * SU2 EDU, Copyright (C) 2014 Aerospace Design Laboratory (Stanford University). * * SU2 EDU is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * SU2 EDU is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with SU2 EDU. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|> #include <ctime> #include "solver_structure.hpp" #include "integration_structure.hpp" #include "output_structure.hpp" #include "numerics_structure.hpp" #include "geometry_structure.hpp" #include "config_structure.hpp" using namespace std; /*! * \brief Gets the number of zones in the mesh file. * \param[in] val_mesh_filename - Name of the file with the grid information. * \param[in] val_format - Format of the file with the grid information. * \param[in] config - Definition of the particular problem. * \return Total number of zones in the grid file. */ unsigned short GetnZone(string val_mesh_filename, unsigned short val_format, CConfig *config); /*! * \brief Gets the number of dimensions in the mesh file * \param[in] val_mesh_filename - Name of the file with the grid information. * \param[in] val_format - Format of the file with the grid information. * \return Total number of domains in the grid file. */ unsigned short GetnDim(string val_mesh_filename, unsigned short val_format); /*! * \brief Definition and allocation of all solution classes. * \param[in] solver_container - Container vector with all the solutions. * \param[in] geometry - Geometrical definition of the problem. * \param[in] config - Definition of the particular problem. * \param[in] iZone - Index of the zone. */ void Solver_Preprocessing(CSolver ***solver_container, CGeometry **geometry, CConfig *config); /*! * \brief Definition and allocation of all integration classes. * \param[in] integration_container - Container vector with all the integration methods. * \param[in] geometry - Geometrical definition of the problem. * \param[in] config - Definition of the particular problem. * \param[in] iZone - Index of the zone. */ void Integration_Preprocessing(CIntegration **integration_container, CGeometry **geometry, CConfig *config); /*! * \brief Definition and allocation of all solver classes. * \param[in] numerics_container - Description of the numerical method (the way in which the equations are solved). * \param[in] solver_container - Container vector with all the solutions. * \param[in] geometry - Geometrical definition of the problem. * \param[in] config - Definition of the particular problem. * \param[in] iZone - Index of the zone. */ void Numerics_Preprocessing(CNumerics ****numerics_container, CSolver ***solver_container, CGeometry **geometry, CConfig *config); /*! * \brief Do the geometrical preprocessing. * \param[in] geometry - Geometrical definition of the problem. 
* \param[in] config - Definition of the particular problem. * \param[in] val_nZone - Total number of zones. */ void Geometrical_Preprocessing(CGeometry **geometry, CConfig *config);<|fim▁end|>
*/ #pragma once
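Here the hole covered the end of the SU2_EDU license comment, so the expected completion is just the comment closer plus the include guard, as shown above. A toy predicate for that shape, purely illustrative:

# Checks that a completion closes a block comment and supplies #pragma once.
def completes_header(completion: str) -> bool:
    body = completion.strip()
    return body.startswith("*/") and "#pragma once" in body

assert completes_header("*/\n\n#pragma once")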
<|file_name|>solution.cpp<|end_file_name|><|fim▁begin|>class Solution { public: vector<int> ori, cur; random_device rd; mt19937 g; Solution(vector<int> nums) : ori(nums), cur(nums), g(rd()) { } /** Resets the array to its original configuration and return it. */ vector<int> reset() { return cur = ori; } /** Returns a random shuffling of the array. */ vector<int> shuffle() { std::shuffle(cur.begin(), cur.end(), g); return cur; } }; <|fim▁hole|>/** * Your Solution object will be instantiated and called as such: * Solution obj = new Solution(nums); * vector<int> param_1 = obj.reset(); * vector<int> param_2 = obj.shuffle(); */<|fim▁end|>
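The shuffle record keeps a pristine copy for reset() and delegates shuffle() to std::shuffle seeded from std::random_device. The same contract with the Fisher-Yates walk written out, as a sketch of what std::shuffle performs internally:

# reset/shuffle contract from the LeetCode record, with an explicit
# Fisher-Yates pass; random.randint is inclusive on both ends.
import random

class Shuffler:
    def __init__(self, nums):
        self.original = list(nums)
        self.current = list(nums)

    def reset(self):
        self.current = list(self.original)
        return self.current

    def shuffle(self):
        a = self.current
        for i in range(len(a) - 1, 0, -1):
            j = random.randint(0, i)     # pick from the untouched prefix
            a[i], a[j] = a[j], a[i]
        return a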
<|file_name|>xquad_test.py<|end_file_name|><|fim▁begin|># coding=utf-8<|fim▁hole|># # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for xquad dataset module.""" from tensorflow_datasets import testing from tensorflow_datasets.question_answering import xquad class XquadTest(testing.DatasetBuilderTestCase): DATASET_CLASS = xquad.Xquad BUILDER_CONFIG_NAMES_TO_TEST = ["ar"] DL_EXTRACT_RESULT = { "translate-train": "translate-train.json", "translate-dev": "translate-dev.json", "translate-test": "translate-test.json", "test": "test.json", } SPLITS = { "translate-train": 3, "translate-dev": 2, "translate-test": 3, "test": 1, } if __name__ == "__main__": testing.test_main()<|fim▁end|>
# Copyright 2022 The TensorFlow Datasets Authors.
<|file_name|>PdxType.cpp<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * PdxObject.cpp * * Created on: Sep 29, 2011 * Author: npatel */ #include "PdxType.hpp" using namespace apache::geode::client; using namespace PdxTests; template <typename T1, typename T2> bool PdxTests::PdxType::genericValCompare(T1 value1, T2 value2) const { if (value1 != value2) return false; LOGINFO("PdxObject::genericValCompare Line_19"); return true; } template <typename T1, typename T2> bool PdxTests::PdxType::genericCompare(T1* value1, T2* value2, int length) const { int i = 0; while (i < length) { if (value1[i] != value2[i]) { return false; } else { i++; } } LOGINFO("PdxObject::genericCompare Line_34"); return true; } template <typename T1, typename T2> bool PdxTests::PdxType::generic2DCompare(T1** value1, T2** value2, int length, int* arrLengths) const { LOGINFO("generic2DCompare length = %d ", length); LOGINFO("generic2DCompare value1 = %d \t value2", value1[0][0], value2[0][0]); LOGINFO("generic2DCompare value1 = %d \t value2", value1[1][0], value2[1][0]); LOGINFO("generic2DCompare value1 = %d \t value2", value1[1][1], value2[1][1]); for (int j = 0; j < length; j++) { LOGINFO("generic2DCompare arrlength0 = %d ", arrLengths[j]); for (int k = 0; k < arrLengths[j]; k++) { LOGINFO("generic2DCompare arrlength = %d ", arrLengths[j]); LOGINFO("generic2DCompare value1 = %d \t value2 = %d ", value1[j][k], value2[j][k]); if (value1[j][k] != value2[j][k]) return false; } } LOGINFO("PdxObject::genericCompare Line_34"); return true; } // PdxType::~PdxObject() { //} void PdxTests::PdxType::toData(PdxWriterPtr pw) /*const*/ { // TODO:delete it later int* lengthArr = new int[2]; lengthArr[0] = 1; lengthArr[1] = 2; pw->writeArrayOfByteArrays("m_byteByteArray", m_byteByteArray, 2, lengthArr); pw->writeWideChar("m_char", m_char); pw->markIdentityField("m_char"); pw->writeBoolean("m_bool", m_bool); // 1 pw->markIdentityField("m_bool"); pw->writeBooleanArray("m_boolArray", m_boolArray, 3); pw->markIdentityField("m_boolArray"); pw->writeByte("m_byte", m_byte); pw->markIdentityField("m_byte"); pw->writeByteArray("m_byteArray", m_byteArray, 2); pw->markIdentityField("m_byteArray"); pw->writeWideCharArray("m_charArray", m_charArray, 2); pw->markIdentityField("m_charArray"); pw->writeObject("m_arraylist", m_arraylist); pw->writeObject("m_linkedlist", m_linkedlist); pw->markIdentityField("m_arraylist"); pw->writeObject("m_map", m_map); pw->markIdentityField("m_map"); pw->writeObject("m_hashtable", m_hashtable); pw->markIdentityField("m_hashtable"); pw->writeObject("m_vector", m_vector); pw->markIdentityField("m_vector"); pw->writeObject("m_chs", m_chs); pw->markIdentityField("m_chs"); pw->writeObject("m_clhs", m_clhs); pw->markIdentityField("m_clhs"); pw->writeString("m_string", 
m_string); pw->markIdentityField("m_string"); pw->writeDate("m_dateTime", m_date); pw->markIdentityField("m_dateTime"); pw->writeDouble("m_double", m_double); pw->markIdentityField("m_double"); pw->writeDoubleArray("m_doubleArray", m_doubleArray, 2); pw->markIdentityField("m_doubleArray"); pw->writeFloat("m_float", m_float); pw->markIdentityField("m_float"); pw->writeFloatArray("m_floatArray", m_floatArray, 2); pw->markIdentityField("m_floatArray"); pw->writeShort("m_int16", m_int16); pw->markIdentityField("m_int16"); pw->writeInt("m_int32", m_int32); pw->markIdentityField("m_int32"); pw->writeLong("m_long", m_long); pw->markIdentityField("m_long"); pw->writeIntArray("m_int32Array", m_int32Array, 4); pw->markIdentityField("m_int32Array"); pw->writeLongArray("m_longArray", m_longArray, 2); pw->markIdentityField("m_longArray"); pw->writeShortArray("m_int16Array", m_int16Array, 2); pw->markIdentityField("m_int16Array"); pw->writeByte("m_sbyte", m_sbyte); pw->markIdentityField("m_sbyte"); pw->writeByteArray("m_sbyteArray", m_sbyteArray, 2); pw->markIdentityField("m_sbyteArray"); // int* strlengthArr = new int[2]; // strlengthArr[0] = 5; // strlengthArr[1] = 5; pw->writeStringArray("m_stringArray", m_stringArray, 2); pw->markIdentityField("m_stringArray"); pw->writeShort("m_uint16", m_uint16); pw->markIdentityField("m_uint16"); pw->writeInt("m_uint32", m_uint32); pw->markIdentityField("m_uint32"); pw->writeLong("m_ulong", m_ulong); pw->markIdentityField("m_ulong"); pw->writeIntArray("m_uint32Array", m_uint32Array, 4); pw->markIdentityField("m_uint32Array"); pw->writeLongArray("m_ulongArray", m_ulongArray, 2); pw->markIdentityField("m_ulongArray"); pw->writeShortArray("m_uint16Array", m_uint16Array, 2); pw->markIdentityField("m_uint16Array"); pw->writeByteArray("m_byte252", m_byte252, 252); pw->markIdentityField("m_byte252"); pw->writeByteArray("m_byte253", m_byte253, 253); pw->markIdentityField("m_byte253"); pw->writeByteArray("m_byte65535", m_byte65535, 65535); pw->markIdentityField("m_byte65535"); pw->writeByteArray("m_byte65536", m_byte65536, 65536); pw->markIdentityField("m_byte65536"); pw->writeObject("m_pdxEnum", m_pdxEnum); pw->markIdentityField("m_pdxEnum"); pw->writeObject("m_address", m_objectArray); pw->writeObjectArray("m_objectArray", m_objectArray); pw->writeObjectArray("", m_objectArrayEmptyPdxFieldName); LOGDEBUG("PdxObject::writeObject() for enum Done......"); LOGDEBUG("PdxObject::toData() Done......"); // TODO:delete it later } void PdxTests::PdxType::fromData(PdxReaderPtr pr) { // TODO:temp added, delete later int32_t* Lengtharr; GF_NEW(Lengtharr, int32_t[2]); int32_t arrLen = 0; m_byteByteArray = pr->readArrayOfByteArrays("m_byteByteArray", arrLen, &Lengtharr); // TODO::need to write compareByteByteArray() and check for m_byteByteArray // elements m_char = pr->readWideChar("m_char"); // GenericValCompare m_bool = pr->readBoolean("m_bool"); // GenericValCompare m_boolArray = pr->readBooleanArray("m_boolArray", boolArrayLen); m_byte = pr->readByte("m_byte"); m_byteArray = pr->readByteArray("m_byteArray", byteArrayLen); m_charArray = pr->readWideCharArray("m_charArray", charArrayLen); m_arraylist = pr->readObject("m_arraylist"); m_linkedlist = dynCast<CacheableLinkedListPtr>(pr->readObject("m_linkedlist")); m_map = dynCast<CacheableHashMapPtr>(pr->readObject("m_map")); // TODO:Check for the size<|fim▁hole|> m_hashtable = pr->readObject("m_hashtable"); // TODO:Check for the size m_vector = pr->readObject("m_vector"); // TODO::Check for size m_chs = pr->readObject("m_chs"); // 
TODO::Size check m_clhs = pr->readObject("m_clhs"); // TODO:Size check m_string = pr->readString("m_string"); // GenericValCompare m_date = pr->readDate("m_dateTime"); // compareData m_double = pr->readDouble("m_double"); m_doubleArray = pr->readDoubleArray("m_doubleArray", doubleArrayLen); m_float = pr->readFloat("m_float"); m_floatArray = pr->readFloatArray("m_floatArray", floatArrayLen); m_int16 = pr->readShort("m_int16"); m_int32 = pr->readInt("m_int32"); m_long = pr->readLong("m_long"); m_int32Array = pr->readIntArray("m_int32Array", intArrayLen); m_longArray = pr->readLongArray("m_longArray", longArrayLen); m_int16Array = pr->readShortArray("m_int16Array", shortArrayLen); m_sbyte = pr->readByte("m_sbyte"); m_sbyteArray = pr->readByteArray("m_sbyteArray", byteArrayLen); m_stringArray = pr->readStringArray("m_stringArray", strLenArray); m_uint16 = pr->readShort("m_uint16"); m_uint32 = pr->readInt("m_uint32"); m_ulong = pr->readLong("m_ulong"); m_uint32Array = pr->readIntArray("m_uint32Array", intArrayLen); m_ulongArray = pr->readLongArray("m_ulongArray", longArrayLen); m_uint16Array = pr->readShortArray("m_uint16Array", shortArrayLen); // LOGINFO("PdxType::readInt() start..."); m_byte252 = pr->readByteArray("m_byte252", m_byte252Len); m_byte253 = pr->readByteArray("m_byte253", m_byte253Len); m_byte65535 = pr->readByteArray("m_byte65535", m_byte65535Len); m_byte65536 = pr->readByteArray("m_byte65536", m_byte65536Len); // TODO:Check for size m_pdxEnum = pr->readObject("m_pdxEnum"); m_address = pr->readObject("m_address"); // size chaeck m_objectArray = pr->readObjectArray("m_objectArray"); m_objectArrayEmptyPdxFieldName = pr->readObjectArray(""); // Check for individual elements // TODO:temp added delete it later LOGINFO("PdxObject::readObject() for enum Done..."); } CacheableStringPtr PdxTests::PdxType::toString() const { char idbuf[1024]; // sprintf(idbuf,"PdxObject: [ m_bool=%d ] [m_byte=%d] [m_int16=%d] // [m_int32=%d] [m_float=%f] [m_double=%lf] [ m_string=%s ]",m_bool, m_byte, // m_int16, m_int32, m_float, m_double, m_string); sprintf(idbuf, "PdxObject:[m_int32=%d]", m_int32); return CacheableString::create(idbuf); } bool PdxTests::PdxType::equals(PdxTests::PdxType& other, bool isPdxReadSerialized) const { PdxType* ot = dynamic_cast<PdxType*>(&other); if (ot == NULL) { return false; } if (ot == this) { return true; } genericValCompare(ot->m_int32, m_int32); genericValCompare(ot->m_bool, m_bool); genericValCompare(ot->m_byte, m_byte); genericValCompare(ot->m_int16, m_int16); genericValCompare(ot->m_long, m_long); genericValCompare(ot->m_float, m_float); genericValCompare(ot->m_double, m_double); genericValCompare(ot->m_sbyte, m_sbyte); genericValCompare(ot->m_uint16, m_uint16); genericValCompare(ot->m_uint32, m_uint32); genericValCompare(ot->m_ulong, m_ulong); genericValCompare(ot->m_char, m_char); if (strcmp(ot->m_string, m_string) != 0) { return false; } genericCompare(ot->m_byteArray, m_byteArray, byteArrayLen); genericCompare(ot->m_int16Array, m_int16Array, shortArrayLen); genericCompare(ot->m_int32Array, m_int32Array, intArrayLen); genericCompare(ot->m_longArray, m_longArray, longArrayLen); genericCompare(ot->m_doubleArray, m_doubleArray, doubleArrayLen); genericCompare(ot->m_floatArray, m_floatArray, floatArrayLen); genericCompare(ot->m_uint32Array, m_uint32Array, intArrayLen); genericCompare(ot->m_ulongArray, m_ulongArray, longArrayLen); genericCompare(ot->m_uint16Array, m_uint16Array, shortArrayLen); genericCompare(ot->m_sbyteArray, m_sbyteArray, shortArrayLen); 
genericCompare(ot->m_charArray, m_charArray, charArrayLen); // generic2DCompare(ot->m_byteByteArray, m_byteByteArray, byteByteArrayLen, // lengthArr); if (!isPdxReadSerialized) { for (int i = 0; i < m_objectArray->size(); i++) { Address* otherAddr1 = dynamic_cast<Address*>(ot->m_objectArray->at(i).ptr()); Address* myAddr1 = dynamic_cast<Address*>(m_objectArray->at(i).ptr()); if (!otherAddr1->equals(*myAddr1)) return false; } LOGINFO("PdxObject::equals isPdxReadSerialized = %d", isPdxReadSerialized); } // m_objectArrayEmptyPdxFieldName if (!isPdxReadSerialized) { for (int i = 0; i < m_objectArrayEmptyPdxFieldName->size(); i++) { Address* otherAddr1 = dynamic_cast<Address*>(ot->m_objectArray->at(i).ptr()); Address* myAddr1 = dynamic_cast<Address*>(m_objectArray->at(i).ptr()); if (!otherAddr1->equals(*myAddr1)) return false; } LOGINFO("PdxObject::equals Empty Field Name isPdxReadSerialized = %d", isPdxReadSerialized); } CacheableEnumPtr myenum = dynCast<CacheableEnumPtr>(m_pdxEnum); CacheableEnumPtr otenum = dynCast<CacheableEnumPtr>(ot->m_pdxEnum); if (myenum->getEnumOrdinal() != otenum->getEnumOrdinal()) return false; if (strcmp(myenum->getEnumClassName(), otenum->getEnumClassName()) != 0) { return false; } if (strcmp(myenum->getEnumName(), otenum->getEnumName()) != 0) return false; genericValCompare(ot->m_arraylist->size(), m_arraylist->size()); for (int k = 0; k < m_arraylist->size(); k++) { genericValCompare(ot->m_arraylist->at(k), m_arraylist->at(k)); } LOGINFO("Equals Linked List Starts"); genericValCompare(ot->m_linkedlist->size(), m_linkedlist->size()); for (int k = 0; k < m_linkedlist->size(); k++) { genericValCompare(ot->m_linkedlist->at(k), m_linkedlist->at(k)); } LOGINFO("Equals Linked List Finished"); genericValCompare(ot->m_vector->size(), m_vector->size()); for (int j = 0; j < m_vector->size(); j++) { genericValCompare(ot->m_vector->at(j), m_vector->at(j)); } LOGINFO("PdxObject::equals DOne Line_201"); return true; }<|fim▁end|>
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # hrCMS documentation build configuration file, created by # sphinx-quickstart on Sat Mar 28 20:11:13 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os import shlex # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames.<|fim▁hole|># source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'leonardo-api' copyright = u'2015, Michael Kuty' author = u'Michael Kuty' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0' # The full version, including alpha/beta/rc tags. release = '0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = True # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. 
htmlhelp_basename = 'hrCMSdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'leonardo_api.tex', u'leonardo-api', u'Michael Kuty', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'leonardo_api', u'leonardo-api Documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'leonardo_api', u'leonardo-api', author, 'leonardo_api', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'https://docs.python.org/': None}<|fim▁end|>
# You can specify multiple suffixes as a list of strings:
<|file_name|>move_base_force_cancel.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python import rospy import actionlib from move_base_msgs.msg import MoveBaseActionGoal from actionlib_msgs.msg import GoalID class ForceCancel(object): def __init__(self, nodename="force_cancel", is_newnode=True, repetition=10): self.repetition = rospy.get_param("~repetition", repetition) if is_newnode: rospy.init_node(name=nodename, anonymous=False) rospy.on_shutdown(self.shutdown) pub = rospy.Publisher("move_base/cancel", GoalID, queue_size=1) sub = rospy.Subscriber("move_base/goal", MoveBaseActionGoal, self.callback, queue_size=1) rospy.wait_for_message("move_base/goal", MoveBaseActionGoal, 60) r = rospy.Rate(1) counter = 0 while not rospy.is_shutdown() and (counter < self.repetition): msg = GoalID() msg.id = self.id pub.publish(msg) r.sleep() counter += 1 def callback(self, msg):<|fim▁hole|> def shutdown(self): rospy.loginfo("cancel job finished") rospy.sleep(1) pass if __name__ == "__main__": fc = ForceCancel('force_cancel', False, 5)<|fim▁end|>
self.id = msg.goal_id.id
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![crate_name = "rusthello_lib"] #![crate_type = "lib"]<|fim▁hole|> // External crates extern crate rand; extern crate rayon; extern crate reversi; extern crate termion; // Modules pub mod interface; pub mod human_player; pub mod ai_player; use reversi::{ReversiError}; use reversi::game::{PlayerAction}; use std::result; pub enum OtherAction { Help, Quit, } pub type Action = PlayerAction<OtherAction>; pub type Result<T> = result::Result<T, ReversiError>;<|fim▁end|>
<|file_name|>convertTreeToEvt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>import os import sys import math import pyRootPwa import pyRootPwa.core def writeParticleToFile (outFile, particleName, particleMomentum): if pyRootPwa.core.particleDataTable.isInTable(particleName): partProperties = pyRootPwa.core.particleDataTable.entry(particleName) charge = partProperties.charge energy = math.sqrt(particleMomentum.Px()**2 + particleMomentum.Py()**2 + particleMomentum.Pz()**2 + partProperties.mass2) outFile.write( str(pyRootPwa.core.particleDataTable.geantIdFromParticleName(particleName)) + " " + str(charge) + " " + '%.16e' % particleMomentum.Px() + " " + '%.16e' % particleMomentum.Py() + " " + '%.16e' % particleMomentum.Pz() + " " + '%.16e' % energy + "\n" ) return True else: pyRootPwa.utils.printErr("particle '" + particleName + "' not found in particleDataTable.") return False if __name__ == "__main__": parser = argparse.ArgumentParser( description="Converts ROOTPWA .root file to .evt file." ) parser.add_argument("inputFileName", help="The path to the RootPwa input file") parser.add_argument("outputFileName", help="The path to the ASCII evt output file") parser.add_argument("-p", "--particleDataTable", help="The path of particleDataTable file (default: '$ROOTPWA/particleData/particleDataTable.txt')", default='$ROOTPWA/particleData/particleDataTable.txt') args = parser.parse_args() printWarn = pyRootPwa.utils.printWarn printErr = pyRootPwa.utils.printErr printSucc = pyRootPwa.utils.printSucc ROOT = pyRootPwa.ROOT pdtPath = os.path.expandvars(args.particleDataTable) if not pyRootPwa.core.particleDataTable.instance.readFile(pdtPath): printErr("error loading particleDataTable from '" + pdtPath + "'. Aborting...") sys.exit(1) inputFile = ROOT.TFile(args.inputFileName, "READ") if not inputFile: printErr("error opening input file. Aborting...") sys.exit(1) metaData = pyRootPwa.core.eventMetadata.readEventFile(inputFile) if metaData == 0: printErr("error reading metaData. Input file is not a RootPWA root file.") prodKinPartNames = metaData.productionKinematicsParticleNames() decayKinPartNames = metaData.decayKinematicsParticleNames() tree = metaData.eventTree() with open(args.outputFileName, 'w') as outputEvtFile: particleCount = len(prodKinPartNames) + len(decayKinPartNames) for event in tree: prodKinMomenta = event.__getattr__(metaData.productionKinematicsMomentaBranchName) decayKinMomenta = event.__getattr__(metaData.decayKinematicsMomentaBranchName) if particleCount != (prodKinMomenta.GetEntries() + decayKinMomenta.GetEntries()): printErr("particle count in metaData does not match particle count in event data.") sys.exit(1) outputEvtFile.write(str(particleCount) + '\n') for particle in range(prodKinMomenta.GetEntries()): if not writeParticleToFile(outputEvtFile, prodKinPartNames[particle], prodKinMomenta[particle]): printErr("failed writing particle '" + particle + "' to output file.") sys.exit(1) for particle in range(decayKinMomenta.GetEntries()): if not writeParticleToFile(outputEvtFile, decayKinPartNames[particle], decayKinMomenta[particle]): printErr("failed writing particle '" + particle + "' to output file.") sys.exit(1) inputFile.Close() printSucc("successfully converted '" + args.inputFileName + "' to '" + args.outputFileName + "'.")<|fim▁end|>
import argparse
<|file_name|>mailbase.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding: utf-8 import email.utils import logging import os import smtplib import threading from email.mime.text import MIMEText from email.MIMEMultipart import MIMEMultipart logger = logging.getLogger("maillog") class MailBase(threading.Thread): mailServerPort = 25 def __init__(self, subject, content, basic_info, attachment=""): """ Multi-threaded mail sending class @Params target: file or string basicInfo= { "TOLIST": ["[email protected]"], "SERVER": "mail.ucweb.com", "PORT": 25, #25 if missing "USERNAME": "[email protected]", "PASSWORD": "" } (attachment) :param subject: mail subject :param content: a file name or the content itself; strings longer than 50 characters are treated as content :param basic_info: mail-related settings :param attachment: attachment """ threading.Thread.__init__(self) self._set_basic_info(basic_info) self.subject = subject self.content = content self.attachment = attachment def _set_basic_info(self, basic_info): """ :type basic_info: dict """ self.BASICS = {} basic = ["TOLIST", "SERVER", "USERNAME", "PASSWORD", "PORT"] if isinstance(basic_info, dict): if "PORT" not in basic_info.keys(): basic_info["PORT"] = self.mailServerPort if len(basic_info.keys()) != len(basic): logger.error("params nums not correct~") raise BadEmailSettings("basic_info param error") for basic in basic: if basic in basic_info.keys(): self.BASICS[basic] = basic_info[basic] else: logger.error("mail settings has no %s", basic) raise BadEmailSettings() else: logger.error("basic_info should be a dict") raise BadEmailSettings("basic_info not a dict") def _send_mail(self, subject, content, attachment): subject = subject.decode("utf-8") self._do_send_mail(self.BASICS["TOLIST"], subject, content, attachment) def run(self): if not self.subject or not self.content: return self._send_mail(self.subject, self.content, self.attachment) def _do_send_mail(self, to, subject, content, attachment): msg = MIMEMultipart('related') msg['To'] = ', '.join(to) msg['From'] = email.utils.formataddr((self.BASICS["USERNAME"], self.BASICS["USERNAME"])) msg['Subject'] = subject # msgText = MIMEText(content.encode("utf-8"), "html") msgtext = MIMEText(content, "html") msgtext.set_charset('utf-8') msg.attach(msgtext) if attachment: att = MIMEText(open(attachment, 'rb').read(), 'base64', 'utf-8') att["Content-Type"] = 'application/octet-stream' att["Content-Disposition"] = 'attachment;filename="%s"' % attachment msg.attach(att) server = smtplib.SMTP(self.BASICS["SERVER"], self.BASICS["PORT"]) server.set_debuglevel(False) # show communication with the server server.login(self.BASICS["USERNAME"], self.BASICS["PASSWORD"])<|fim▁hole|> server.quit() class FileMail(MailBase): """ Load a file and send its contents as mail """ def __init__(self, subject, mail_file, basic_info, attachment=""): if len(mail_file) <= 50 and os.path.isfile(mail_file): fd = open(mail_file) content = fd.read() content = "<br/>".join(content.split("\n")) fd.close() else: content = "" super(FileMail, self).__init__(subject, content, basic_info, attachment) class BadEmailSettings(Exception): pass<|fim▁end|>
try: server.sendmail(self.BASICS["USERNAME"], to, msg.as_string()) finally:
<|file_name|>alpha_beta.rs<|end_file_name|><|fim▁begin|>use ::NodeCount; use uci; use std::fmt; use std::cmp; use std::cmp::PartialOrd; use std::collections::HashMap; use std::collections::hash_map::Entry; use std::hash::Hash; use std::cmp::Ordering; use std::f32; use std::ops; use board_game_traits::board::GameResult; use board_game_traits::board::ExtendedBoard; use pgn_traits::pgn::PgnBoard; use board_game_traits::board::Color; use board_game_traits::board::Color::*; use self::Score::*; use std::mem; use std::time; use std::sync::{Arc, Mutex}; use uci::TimeRestriction::{Nodes, Mate, GameTime}; use board_game_traits::board::Board; use uci_engine; use uci_engine::UciOption; use search_algorithms::alpha_beta::Score::Loss; use search_algorithms::alpha_beta::Score::Win; use search_algorithms::alpha_beta::Score::Val; use uci::EngineComm; use uci_engine::UciOptionType;<|fim▁hole|>use std::io::Write; type Depth = u16; pub struct AlphaBeta<B: ExtendedBoard> { time_limit: uci::TimeRestriction, options: uci::EngineOptions, start_time: time::Instant, node_counter: NodeCount, root_move_list: Option<Vec<B::Move>>, table: Table<B::HashBoard, B::Move>, engine_comm : Arc<Mutex<uci::EngineComm>>, } impl<B> uci_engine::UciEngine<B> for AlphaBeta<B> where B: ExtendedBoard + PgnBoard + fmt::Debug + Hash + Eq + 'static { fn init() -> Self { AlphaBeta { time_limit: uci::TimeRestriction::Infinite, options: uci::EngineOptions::new(), start_time: time::Instant::now(), node_counter: NodeCount::new(), root_move_list: None, table: Table::new(0), engine_comm: Arc::new(Mutex::new(uci::EngineComm::new())), } } fn uci_options(&mut self) -> Vec<UciOption> { vec![ UciOption { name: "DebugInfo".to_string(), option_type: UciOptionType::Check(false) }, UciOption { name: "hash".to_string(), option_type: UciOptionType::Spin(256, 0, 65536) }, UciOption { name: "threads".to_string(), option_type: UciOptionType::Spin(1, 1, 1) }, UciOption { name: "multipv".to_string(), option_type: UciOptionType::Spin(1, 1, 65536) }, UciOption { name: "UCI_Variant".to_string(), option_type: UciOptionType::Combo( "standard".to_string(), vec!["standard".to_string(), "sjadam".to_string(), "crazyhouse".to_string()]) } ] } fn set_uci_option(&mut self, uci_option: UciOption) { match (uci_option.name.as_str(), uci_option.option_type) { ("DebugInfo", UciOptionType::Check(val)) => self.options.debug_info = val, ("hash", UciOptionType::Spin(hash, _, _)) => self.options.hash_memory = hash as u32, ("threads", UciOptionType::Spin(threads, _, _)) => self.options.threads = threads as u32, ("multipv", UciOptionType::Spin(multipv, _, _)) => self.options.multipv = multipv as u32, ("UCI_Variant", UciOptionType::Combo(variant, _)) => match variant.as_str() { "standard" => self.options.variant = ChessVariant::Standard, "sjadam" => self.options.variant = ChessVariant::Sjadam, "crazyhouse" => self.options.variant = ChessVariant::Crazyhouse, variant => panic!("Unknown game/chess variant {}", variant), }, (name, option_type) => { let message = format!("Unknown option {} of type {:?}", name, option_type); warn!("{}", message); io::stderr().write( message.as_bytes()).unwrap(); io::stderr().write( b"\n").unwrap(); }, } } fn search(self, mut board: B, time_limit: uci::TimeRestriction, engine_comm: Arc<Mutex<EngineComm>>, move_list: Option<Vec<B::Move>>) -> Box<Iterator<Item=UciInfo<B>>> { let max_depth: Depth = match time_limit { uci::TimeRestriction::Depth(d) | uci::TimeRestriction::Mate(d) => d, _ => 128, }; debug_assert!(max_depth > 1); // If the root position is drawn, 
instamove another drawing move if board.game_result() == Some(GameResult::Draw) { return Box::new(self.preserve_draw(&mut board).into_iter()); } Box::new(DepthIterator::new(self, board, time_limit, engine_comm, move_list)) } } impl<B> AlphaBeta<B> where B: ExtendedBoard + PgnBoard + fmt::Debug + Hash + Eq { fn init_new_search(&mut self, time_limit: uci::TimeRestriction, engine_comm: Arc<Mutex<EngineComm>>, move_list: Option<Vec<B::Move>>) { self.engine_comm = engine_comm; self.start_time = time::Instant::now(); self.table = Table::new(self.options.hash_memory as usize * 1024 * 1024); self.node_counter = NodeCount::new(); self.time_limit = time_limit; self.root_move_list = move_list; } fn search_depth(&mut self, board: &mut B, depth: Depth) -> Option<UciInfo<B>> { let mut pvs: Vec<(Score, Vec<B::Move>)> = vec![]; // Scores and principal variations of searched moves let mut pv_moves: Vec<B::Move> = vec![]; // Moves that have already been returned, for multipv mode for _ in 0..self.options.multipv { // In multipv search, the root position needs to be cleared from the hash for correct behaviour. self.table.remove(&board.hash_board()); let mut moves_to_search = vec![]; board.generate_moves(&mut moves_to_search); let total_move_count = moves_to_search.len(); moves_to_search.retain(|mv| !pv_moves.contains(mv) && (self.root_move_list.as_ref() .map(|moves| moves.contains(mv)) .unwrap_or(true))); if moves_to_search.is_empty() { continue; } if let Some((score, moves)) = // If all moves are to be searched, send None to the function // This way, the root position will still be hashed correctly if moves_to_search.len() == total_move_count { self.find_best_move_ab(board, depth, None) } else { self.find_best_move_ab(board, depth, Some(moves_to_search)) } { if moves.is_empty() { error!("No moves returned when searching at depth {}. Board:\n{:?}", depth, board); } else { pv_moves.push(moves[0].clone()); } pvs.push((score, moves)); } else { return None; // The search has been stopped. Do not send any more data. 
} } let time_taken = time::Instant::now() - self.start_time; let ms_taken = time_taken.as_secs() as u32 * 1000 + time_taken.subsec_nanos() / 1_000_000; let uci_info = uci::UciInfo { depth: depth, seldepth: depth, time: ms_taken as i64, nodes: self.node_counter.total(), hashfull: self.table.mem_usage as f64 / (self.table.max_memory + 1) as f64, pvs: pvs, color: board.side_to_move() }; if self.options.debug_info { println!("leaf nodes: {}, internal nodes: {}, qsearch nodes: {}, null move cutoffs: {}, null_move_skips: {}, full hash hits:  {}, partial hash hits: {}, hash misses: {}, hash_move_cutoffs: {}", self.node_counter.leaf, self.node_counter.intern, self.node_counter.qsearch, self.node_counter.null_move_cutoffs, self.node_counter.null_move_skips, self.node_counter.hash_full_hits, self.node_counter.hash_partial_hits, self.node_counter.hash_misses, self.node_counter.hash_move_cutoffs) } Some(uci_info) } /// Returns a score, and a list of moves representing the best line it found fn find_best_move_ab (&mut self, board : &mut B, depth : Depth, move_list: Option<Vec<B::Move>>) -> Option<(Score, Vec<B::Move>)> where B: PgnBoard + ExtendedBoard + fmt::Debug + Hash + Eq { if let Some((score, _, mut moves)) = self.find_best_move_ab_rec(board, depth, move_list, Loss(0), Win(0), &[None, None], true) { debug_assert!(!moves.is_empty(), "Found empty pv at depth {}, score was {:?}", depth, score); moves.reverse(); Some((score, moves)) } else { None } } fn find_best_move_ab_rec (&mut self, board: &mut B, depth : Depth, mut move_list: Option<Vec<B::Move>>, mut alpha: Score, beta : Score, killer_moves: &[Option<B::Move>; 2], allow_null_moves: bool) -> Option<(Score, Option<B::Move>, Vec<B::Move>)> where B: PgnBoard + ExtendedBoard + fmt::Debug + Hash + Eq { debug_assert!(alpha <= beta, "alpha <= beta. 
alpha={:?}, beta={:?}, depth={}, board:\n{:?}", alpha, beta, depth, board); let first_candidate = if let Some(&HashEntry{ref best_reply, score: (ordering, score), depth: entry_depth }) = self.table.get(&board.hash_board()) { if entry_depth >= depth { match ordering { Ordering::Equal => { self.node_counter.hash_full_hits += 1; return Some((score, None, best_reply.iter().cloned().collect())) }, Ordering::Less if score < alpha => { self.node_counter.hash_full_hits += 1; return Some((score, None, best_reply.iter().cloned().collect())) }, Ordering::Greater if score > beta => { self.node_counter.hash_full_hits += 1; return Some((score, None, best_reply.iter().cloned().collect())) }, _ => { self.node_counter.hash_partial_hits += 1; best_reply.clone() } } } else { self.node_counter.hash_partial_hits += 1; best_reply.clone() } } else { self.node_counter.hash_misses += 1; None }; // Check if the thread should stop if self.node_counter.total() % 2048 == 0 { self.abort_search_check(board.side_to_move())?; } // Helpful alias let color = board.side_to_move(); let mut best_line = vec![]; if depth == 0 { if let Some((score, killer_move, best_move, node_type)) = self.qsearch(board, depth, alpha, beta, first_candidate) { self.table.insert(board.hash_board(), HashEntry { best_reply: best_move.clone(), score: (node_type, score), depth }); if let Some(mv) = best_move { return Some((score, killer_move, vec![mv])); } else { return Some((score, killer_move, vec![])); } } else { return None; } } debug_assert!(depth > 0); if let Some(result) = board.game_result() { return Some((Score::from_game_result(result, color), None, Vec::new())) } const R : Depth = 2; if alpha > Loss(Depth::max_value()) && depth >= 2 && allow_null_moves && board.null_move_is_available() { let reverse_null_move = board.do_null_move(); let (tried_score, _, _) = self.find_best_move_ab_rec(board, (depth - 1).saturating_sub(R), None, !decrement_score(beta), !decrement_score(alpha), &[None, None], false)?; board.reverse_null_move(reverse_null_move); if !tried_score >= beta { self.node_counter.null_move_cutoffs += 1; // Return immediately for null move cutoffs. // Do not store hash entries or killer moves. 
return Some((increment_score(!tried_score), None, vec![])); } else { self.node_counter.null_move_skips += 1; } } self.node_counter.intern += 1; let mut legal_moves = if move_list.is_some() { move_list.take().unwrap() } else { let mut moves = vec![]; board.generate_moves(&mut moves); moves }; // If there is mate or stalemate on the board, we should already have returned debug_assert!(!legal_moves.is_empty(), "Found 0 legal moves, but game result was {:?} on \n{:?}", board.game_result(), board); let mut moves = vec![]; if let Some(mv) = first_candidate.as_ref() { moves.push(mv.clone()); } moves.extend(killer_moves.iter() .flat_map(|mv| mv.iter()) .cloned() .filter(|mv| board.move_is_legal(mv.clone()))); moves.append(&mut legal_moves); let old_eval = board.static_eval() * color.multiplier() as f32; let mut move_searched = false; // ensure not all moves are pruned as null moves let mut node_type = if alpha == Loss(0) && beta == Win(0) { Ordering::Equal // It is currently a pv node } else { Ordering::Less // All-node: True value is < score }; let mut child_killer_moves = [None, None]; for (i, c_move) in moves.iter().enumerate() { #[cfg(debug_assertions)] let old_board = board.clone(); let reverse_move = board.do_move(c_move.clone()); match alpha { // Do null-move pruning // Do not prune if you are getting mated // TODO: Verify sanity Loss(_) => (), _ if depth < 3 && self.options.null_move_pruning && move_searched && !board.game_result().is_some() => { let eval = board.static_eval() * color.multiplier() as f32; if eval < old_eval { board.reverse_move(reverse_move); continue; } } _ => (), } let (mut tried_score, child_killer_move, tried_line) = self.find_best_move_ab_rec(board, depth - 1, None, !decrement_score(beta), !decrement_score(alpha), &child_killer_moves, true)?; if let Some(mv) = child_killer_move { insert_killer_move(&mut child_killer_moves, mv); } tried_score = !tried_score; board.reverse_move(reverse_move); #[cfg(debug_assertions)] debug_assert_eq!(board, &old_board, "Failed to restore board after move {:?}", c_move); if !move_searched { alpha = alpha.max(tried_score); best_line = tried_line.clone(); best_line.push(c_move.clone()); move_searched = true; } if tried_score >= beta { node_type = Ordering::Greater; // True value is >= beta alpha = tried_score; best_line = tried_line.clone(); best_line.push(c_move.clone()); if i == 0 && Some(c_move) == first_candidate.as_ref() { self.node_counter.hash_move_cutoffs += 1; } break; } if tried_score > alpha { alpha = tried_score; best_line = tried_line; best_line.push(c_move.clone()); } } let score = increment_score(alpha); if move_list == None { // When doing multipv search, the position may already be in the hash // In that case, do not overwrite it self.table.insert(board.hash_board(), HashEntry { best_reply: best_line.last().cloned(), score: (node_type, score), depth }); } let killer_move = if node_type == Ordering::Greater { best_line.last().cloned() } else { None }; Some((score, killer_move, best_line)) } fn qsearch(&mut self, board: &mut B, depth: Depth, mut alpha: Score, beta: Score, hash_move: Option<<B as Board>:: Move>) -> Option<(Score, Option<B::Move>, Option<B::Move>, Ordering)> where <B as Board>::Move: PartialEq + fmt::Debug { let color = board.side_to_move(); if let Some(result) = board.game_result() { self.node_counter.leaf += 1; return Some((Score::from_game_result(result, color), None, None, Ordering::Equal)) } // If searching for mate, don't do quiescence search if let Mate(_) = self.time_limit { return 
Some((Val(board.static_eval() * color.multiplier() as f32), None, None, Ordering::Equal)); } let stand_pat = Val(board.static_eval() * color.multiplier() as f32); if stand_pat >= beta { self.node_counter.leaf += 1; return Some((beta, None, None, Ordering::Equal)); } alpha = alpha.max(stand_pat); let mut node_type = if alpha == Loss(0) && beta == Win(0) { Ordering::Equal // Pv-node } else { Ordering::Less // All-node: True value is < score }; let mut active_moves = vec![]; board.active_moves(&mut active_moves); if let Some(ref mv) = hash_move { // If active moves don't contain the move, search it anyway if let Some(index) = active_moves.iter().position(|mv2| mv == mv2) { active_moves.swap(0, index); } else { active_moves.insert(0, mv.clone()); } } let mut best_move = None; if active_moves.is_empty() { self.node_counter.leaf += 1; } else { self.node_counter.qsearch += 1; } for (i, mv) in active_moves.iter().enumerate() { let reverse_move = board.do_move(mv.clone()); let (mut score, _, _, _) = self.qsearch( board, depth, !decrement_score(beta), !decrement_score(alpha), None)?; score = !score; board.reverse_move(reverse_move); if score >= beta { node_type = Ordering::Greater; // Cute-node: True value is >= beta alpha = score; // TODO: Could be beta as well. Test. best_move = Some(mv.clone()); if i == 0 && hash_move.is_some() { self.node_counter.hash_move_cutoffs += 1; } break; } if score > alpha { alpha = score; best_move = Some(mv.clone()); } } let killer_move = if node_type == Ordering::Greater { best_move.clone() } else { None }; let score = increment_score(alpha); return Some((score, killer_move, best_move, node_type)); } fn abort_search_check(&mut self, to_move: Color) -> Option<()> { { let engine_comm = self.engine_comm.lock().unwrap(); if engine_comm.engine_should_stop { return None // "Engine was told to stop" } } let time_taken = time::Instant::now() - self.start_time; // If we've spent more than half of our time, abort immediately match self.time_limit { Nodes(n) if self.node_counter.total() > n => return None, GameTime(info) => { let time_left = match to_move { White => info.white_time, Black => info.black_time, }; if time_taken > time_left / 2 { None } else { Some(()) } } // If on movetime restriction, abort if we are getting close to our time limit uci::TimeRestriction::MoveTime(time) if time_taken > time - time::Duration::from_millis(10) => { None }, _ => Some(()), } } /// A bugged GUI may not correctly adjudicate draws /// Therefore, if the root position is in fact already a draw, make any move that preserve the draw fn preserve_draw(&self, board: &mut B) -> Option<uci::UciInfo<B>> { let mut moves = vec![]; board.generate_moves(&mut moves); for mv in moves { let reverse_move = board.do_move(mv.clone()); if board.game_result() == Some(GameResult::Draw) { board.reverse_move(reverse_move); let uci_info = uci::UciInfo { depth: 1, seldepth: 1, time: 0, nodes: 1, hashfull: 0.0, pvs: vec![(Score::Draw(0), vec![mv.clone()])], color: board.side_to_move() }; return Some(uci_info); } board.reverse_move(reverse_move); } None } } fn insert_killer_move<T: Eq>(moves: &mut[Option<T>; 2], new_move: T) { if moves[0].is_none() { moves[0] = Some(new_move); } else if moves[1].is_none() && &new_move != moves[0].as_ref().unwrap() { moves[1] = Some(new_move) } else if moves[0].as_ref().unwrap() != &new_move { moves.swap(1, 0); moves[0] = Some(new_move); } } fn increment_score(score: Score) -> Score { match score { Loss(i) => Loss(i + 1), Win(i) => Win(i + 1), Draw(i) => Draw(i + 1), Val(n) => Val(n), } 
} fn decrement_score(score: Score) -> Score { match score { Loss(i) => Loss(Depth::saturating_sub(i, 1)), Win(i) => Win(Depth::saturating_sub(i, 1)), Draw(i) => Draw(Depth::saturating_sub(i, 1)), Val(n) => Val(n), } } struct DepthIterator<B: ExtendedBoard> { depth: Depth, engine: AlphaBeta<B>, board: B, } impl<B> DepthIterator<B> where B: PgnBoard + ExtendedBoard + fmt::Debug + Hash + Eq + 'static { fn new(engine: AlphaBeta<B>, board: B, time_limit: uci::TimeRestriction, engine_comm: Arc<Mutex<EngineComm>>, move_list: Option<Vec<B::Move>>) -> DepthIterator<B> { let mut iter = DepthIterator { depth: 0, engine: engine, board: board, }; iter.engine.init_new_search(time_limit, engine_comm, move_list); iter } } impl<B: ExtendedBoard> Iterator for DepthIterator<B> where B: PgnBoard + ExtendedBoard + fmt::Debug + Hash + Eq { type Item = UciInfo<B>; fn next(&mut self) -> Option<Self::Item> { self.depth += 1; // If we're playing a time control, don't start searching deeper // if we have little time left let time_taken = time::Instant::now() - self.engine.start_time; match self.engine.time_limit { uci::TimeRestriction::GameTime(info) => { let time_taken = time::Instant::now() - self.engine.start_time; let (time, inc) = match self.board.side_to_move() { White => (info.white_time, info.white_inc), Black => (info.black_time, info.black_inc), }; if time_taken > inc / (B::BRANCH_FACTOR as u32 / 5) + time / (B::BRANCH_FACTOR as u32 * 5) { return None; } }, uci::TimeRestriction::MoveTime(time) => if time_taken > time / (B::BRANCH_FACTOR as u32 / 10) { return None; }, uci::TimeRestriction::Depth(d) | uci::TimeRestriction::Mate(d) => if self.depth > d { return None; }, _ => (), } self.engine.search_depth(&mut self.board, self.depth) } } struct HashEntry<M> { best_reply: Option<M>, score: (Ordering, Score), depth: Depth, } /// A transposition table for storing known positions, which only grows to a certain /// size in memory. struct Table<B, M> { hash_table: HashMap<B, HashEntry<M>>, hits: u64, // Total hits in table lookups: u64, // Total lookups in table mem_usage: usize, max_memory: usize, } impl<B: Eq + Hash, M> Table<B, M> { #[inline(never)] pub fn new(max_memory: usize) -> Table<B, M> { Table { hash_table: HashMap::with_capacity( 6 * max_memory / (10 * Self::value_mem_usage())), hits: 0, lookups: 0, mem_usage: 0, max_memory } } fn value_mem_usage() -> usize { mem::size_of::<HashEntry<M>>() + mem::size_of::<u64>() + mem::size_of::<u64>() } #[inline(never)] pub fn get(&mut self, key: &B) -> Option<&HashEntry<M>> where B: Eq + Hash { self.lookups += 1; let result = self.hash_table.get(&key); if result.is_some() { self.hits += 1; } result } #[inline(never)] pub fn insert(&mut self, key: B, value: HashEntry<M>) where B: Eq + Hash { let extra_mem = Self::value_mem_usage(); match self.hash_table.entry(key) { Entry::Occupied(mut entry) => *entry.get_mut() = value, Entry::Vacant(entry) => if self.mem_usage + extra_mem < 6 * self.max_memory / 10 { self.mem_usage += extra_mem; entry.insert(value); }, } } #[allow(dead_code)] pub fn remove(&mut self, key: &B) -> Option<HashEntry<M>> where B: Eq + Hash { //key.hash(&mut self.hasher); self.hash_table.remove(key).map(|value| { self.mem_usage -= Self::value_mem_usage(); value }) } } /// The evaluation of a position. May be exact (A player wins in n moves) or an approximate evaluation. 
#[derive(Clone, Copy, Debug, PartialEq)] pub enum Score { Val(f32), Draw(Depth), Win(Depth), Loss(Depth), } impl Score { pub fn from_game_result(result: GameResult, to_move: Color) -> Self { match (result, to_move) { (GameResult::WhiteWin, White) => Win(0), (GameResult::WhiteWin, Black) => Loss(0), (GameResult::BlackWin, Black) => Win(0), (GameResult::BlackWin, White) => Loss(0), (GameResult::Draw, _) => Draw(0), } } #[allow(dead_code)] pub fn to_cp(self, to_move: Color) -> i16 { match self { Val(val) => (val * 100.0) as i16 * to_move.multiplier() as i16, Draw(_) => 0, Win(n) => (12_000 - n as i16) * to_move.multiplier() as i16, Loss(n) => (-12_000 + n as i16) * to_move.multiplier() as i16, } } pub fn to_value(self, to_move: Color) -> f32 { match self { Val(val) => val, Draw(_) => 0.0, Win(_) => 100.0 * to_move.multiplier() as f32, Loss(_) => -100.0 * to_move.multiplier() as f32, } } pub fn uci_string(mut self, to_move: Color) -> String { if to_move == Black { self = !self; } match self { Val(f) => format!("cp {}", (100.0 * f) as i16), Win(n) => format!("mate {}", (1 + n as i16) / 2), Loss(n) => format!("mate -{}", (1 + n as i16) / 2), Draw(_) => format!("0"), } } } impl ops::Not for Score { type Output = Self; fn not(self) -> Self::Output { match self { Val(val) => Val(-val), Draw(n) => Draw(n), Win(n) => Loss(n), Loss(n) => Win(n), } } } impl PartialOrd for Score { fn partial_cmp (&self, other: &Score) -> Option<cmp::Ordering> { match (*self, *other) { (Win(n1), Win(n2)) => Some((&n2).cmp(&n1)), (Win(_), _) => Some(Ordering::Greater), (Val(_), Win(_)) => Some(Ordering::Less), (Val(_), Loss(_)) => Some(Ordering::Greater), (Val(n1), Val(n2)) => (&n1).partial_cmp(&n2), (Val(n1), Draw(_)) => (&n1).partial_cmp(&0.0), (Draw(_), Val(n1)) => (&0.0).partial_cmp(&n1), (Draw(_), Draw(_)) => Some(Ordering::Equal), (Draw(_), Win(_)) => Some(Ordering::Less), (Draw(_), Loss(_)) => Some(Ordering::Greater), (Loss(n1), Loss(n2)) => Some(n1.cmp(&n2)), (Loss(_), _) => Some(Ordering::Less), } } } impl Ord for Score { fn cmp (&self, other: &Score) -> cmp::Ordering { self.partial_cmp(other).unwrap() } } impl Eq for Score {}<|fim▁end|>
use uci::ChessVariant; use uci::UciInfo; use std::io;
<|file_name|>js.py<|end_file_name|><|fim▁begin|>import logging import hashlib from pylons import request, response, session, tmpl_context as c from pylons.controllers.util import abort, redirect_to, etag_cache from pylons.decorators import jsonify from pylons.i18n.translation import _ from wurdig.lib.base import BaseController, render log = logging.getLogger(__name__) class JsController(BaseController): @jsonify def _json(self): translations = { 'Are you positive you want to do that?': _('Are you positive ' 'you want to do that?'), 'The item has successfully been deleted.': _('The item has ' 'successfully been deleted.'), 'Disapprove': _('Disapprove'), 'The item has successfully been approved.': _('The item has ' 'successfully been approved.'), 'Approve': _('Approve'), 'The item has successfully been disapproved.': _('The item has successfully '<|fim▁hole|> 'completed+successfully'), 'An unexpected error has occurred.': _('An unexpected error has occurred.'), 'Enter key word(s)': _('Enter key word(s)') } return translations def translations(self): json_string = "if(!this.WURDIG) {var WURDIG = {};}WURDIG.translate = %s" % self._json() etag_cache(key=hashlib.md5(json_string).hexdigest()) response.content_type = 'application/x-javascript; charset=utf-8' response.cache_control = 'max-age=2592000' response.pragma = '' return json_string<|fim▁end|>
'been disapproved.'), 'Your+request+has+been+completed+successfully': _('Your+request+has+been+'
<|file_name|>bar.rs<|end_file_name|><|fim▁begin|>#![crate_type = "lib"]<|fim▁hole|><|fim▁end|>
extern crate baz;
<|file_name|>maps_http_geocode_place_id.py<|end_file_name|><|fim▁begin|># [START maps_http_geocode_place_id] import requests url = "https://maps.googleapis.com/maps/api/geocode/json?place_id=ChIJd8BlQ2BZwokRAFUEcm_qrcA&key=YOUR_API_KEY" payload={}<|fim▁hole|>print(response.text) # [END maps_http_geocode_place_id]<|fim▁end|>
headers = {} response = requests.request("GET", url, headers=headers, data=payload)
<|file_name|>create_user.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python #-*- coding:utf-8 -* import csv from werkzeug import generate_password_hash<|fim▁hole|>from bootstrap import db def create_user(email, name, password): user = User(email=email, name=name, pwdhash=generate_password_hash(password), is_active=True) db.session.add(user) db.session.commit()<|fim▁end|>
from web.models import User
<|file_name|>worker_parallel_scheduling_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright 2012-2015 Spotify AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import contextlib import gc import os import pickle import time from helpers import unittest import luigi import mock import psutil from luigi.worker import Worker def running_children(): children = set() process = psutil.Process(os.getpid()) for child in process.children(): if child.is_running(): children.add(child.pid) return children @contextlib.contextmanager def pause_gc(): if not gc.isenabled(): yield try: gc.disable() yield finally: gc.enable()<|fim▁hole|> class SlowCompleteWrapper(luigi.WrapperTask): def requires(self): return [SlowCompleteTask(i) for i in range(4)] class SlowCompleteTask(luigi.Task): n = luigi.IntParameter() def complete(self): time.sleep(0.1) return True class OverlappingSelfDependenciesTask(luigi.Task): n = luigi.IntParameter() k = luigi.IntParameter() def complete(self): return self.n < self.k or self.k == 0 def requires(self): return [OverlappingSelfDependenciesTask(self.n - 1, k) for k in range(self.k + 1)] class ExceptionCompleteTask(luigi.Task): def complete(self): assert False class ExceptionRequiresTask(luigi.Task): def requires(self): assert False class UnpicklableExceptionTask(luigi.Task): def complete(self): class UnpicklableException(Exception): pass raise UnpicklableException() class ParallelSchedulingTest(unittest.TestCase): def setUp(self): self.sch = mock.Mock() self.w = Worker(scheduler=self.sch, worker_id='x') def added_tasks(self, status): return [kw['task_id'] for args, kw in self.sch.add_task.call_args_list if kw['status'] == status] def test_children_terminated(self): before_children = running_children() with pause_gc(): self.w.add( OverlappingSelfDependenciesTask(5, 2), multiprocess=True, ) self.assertLessEqual(running_children(), before_children) def test_multiprocess_scheduling_with_overlapping_dependencies(self): self.w.add(OverlappingSelfDependenciesTask(5, 2), True) self.assertEqual(15, self.sch.add_task.call_count) self.assertEqual(set(( OverlappingSelfDependenciesTask(n=1, k=1).task_id, OverlappingSelfDependenciesTask(n=2, k=1).task_id, OverlappingSelfDependenciesTask(n=2, k=2).task_id, OverlappingSelfDependenciesTask(n=3, k=1).task_id, OverlappingSelfDependenciesTask(n=3, k=2).task_id, OverlappingSelfDependenciesTask(n=4, k=1).task_id, OverlappingSelfDependenciesTask(n=4, k=2).task_id, OverlappingSelfDependenciesTask(n=5, k=2).task_id, )), set(self.added_tasks('PENDING'))) self.assertEqual(set(( OverlappingSelfDependenciesTask(n=0, k=0).task_id, OverlappingSelfDependenciesTask(n=0, k=1).task_id, OverlappingSelfDependenciesTask(n=1, k=0).task_id, OverlappingSelfDependenciesTask(n=1, k=2).task_id, OverlappingSelfDependenciesTask(n=2, k=0).task_id, OverlappingSelfDependenciesTask(n=3, k=0).task_id, OverlappingSelfDependenciesTask(n=4, k=0).task_id, )), set(self.added_tasks('DONE'))) @mock.patch('luigi.notifications.send_error_email') def 
test_raise_exception_in_complete(self, send): self.w.add(ExceptionCompleteTask(), multiprocess=True) send.check_called_once() self.assertEqual(0, self.sch.add_task.call_count) self.assertTrue('assert False' in send.call_args[0][1]) @mock.patch('luigi.notifications.send_error_email') def test_raise_unpicklable_exception_in_complete(self, send): # verify exception can't be pickled self.assertRaises(Exception, UnpicklableExceptionTask().complete) try: UnpicklableExceptionTask().complete() except Exception as e: ex = e self.assertRaises(pickle.PicklingError, pickle.dumps, ex) # verify this can run async self.w.add(UnpicklableExceptionTask(), multiprocess=True) send.check_called_once() self.assertEqual(0, self.sch.add_task.call_count) self.assertTrue('raise UnpicklableException()' in send.call_args[0][1]) @mock.patch('luigi.notifications.send_error_email') def test_raise_exception_in_requires(self, send): self.w.add(ExceptionRequiresTask(), multiprocess=True) send.check_called_once() self.assertEqual(0, self.sch.add_task.call_count) if __name__ == '__main__': unittest.main()<|fim▁end|>
<|file_name|>consoleInput.py<|end_file_name|><|fim▁begin|>#from: http://stackoverflow.com/questions/10361820/simple-twisted-echo-client #and #from: http://stackoverflow.com/questions/510357/python-read-a-single-character-from-the-user from twisted.internet.threads import deferToThread as _deferToThread from twisted.internet import reactor class ConsoleInput(object): def __init__(self, stopFunction, reconnectFunction): self.stopFunction = stopFunction self.reconnectFunction = reconnectFunction def start(self): self.terminator = 'q' self.restart = 'r' self.getKey = _Getch() self.startReceiving() def startReceiving(self, s = ''): if s == self.terminator: self.stopFunction() elif s == self.restart: self.reconnectFunction() _deferToThread(self.getKey).addCallback(self.startReceiving) else: _deferToThread(self.getKey).addCallback(self.startReceiving) class _Getch:<|fim▁hole|> try: self.impl = _GetchWindows() except ImportError: self.impl = _GetchUnix() def __call__(self): return self.impl() class _GetchUnix: def __init__(self): import tty, sys def __call__(self): import sys, tty, termios fd = sys.stdin.fileno() old_settings = termios.tcgetattr(fd) try: tty.setraw(sys.stdin.fileno()) ch = sys.stdin.read(1) finally: termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) return ch class _GetchWindows: def __init__(self): import msvcrt def __call__(self): import msvcrt return msvcrt.getch()<|fim▁end|>
""" Gets a single character from standard input. Does not echo to the screen. """ def __init__(self):
<|file_name|>token.js<|end_file_name|><|fim▁begin|>'use strict'; const path = require('path'); const jwt = require('jsonwebtoken'); const AuthConfig = require(path.resolve('./config')).Auth; const jwtSecret = AuthConfig.jwt.secret; const tokenExpirePeriod = AuthConfig.jwt.tokenExpirePeriod; function generateToken(payLoad) { const isObject = (typeof payLoad === 'object'); if (payLoad) { if (isObject) { return new Promise((resolve, reject) => { jwt.sign(payLoad, jwtSecret, { expiresIn: tokenExpirePeriod }, (error, token) => { if (error) { reject(error); } else { resolve(token); } }); }) } else { const error = new TypeError('Token Payload Must Be An Object'); return Promise.reject(error); } } else { const error = new Error('Token Payload Should Not Be Empty'); return Promise.reject(error); } } function verifyToken(token) { if (token) { return new Promise((resolve, reject) => { jwt.verify(token, jwtSecret, (error, decodedToken) => { if (error) { reject(error); } else { resolve(decodedToken); } }); }) } else { const error = new Error('Token Should Not Be Empty'); return Promise.reject(error);<|fim▁hole|> module.exports = { generate: generateToken, verify: verifyToken };<|fim▁end|>
} }
<|file_name|>csi_block_test.go<|end_file_name|><|fim▁begin|>/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package csi import ( "fmt" "os" "path" "path/filepath" "testing" api "k8s.io/api/core/v1" meta "k8s.io/apimachinery/pkg/apis/meta/v1" fakeclient "k8s.io/client-go/kubernetes/fake" "k8s.io/kubernetes/pkg/volume" volumetest "k8s.io/kubernetes/pkg/volume/testing" ) func TestBlockMapperGetGlobalMapPath(t *testing.T) { plug, tmpDir := newTestPlugin(t) defer os.RemoveAll(tmpDir) // TODO (vladimirvivien) specName with slashes will not work testCases := []struct { name string specVolumeName string path string }{ { name: "simple specName", specVolumeName: "spec-0", path: path.Join(tmpDir, fmt.Sprintf("plugins/kubernetes.io/csi/volumeDevices/%s/%s", "spec-0", "dev")), }, { name: "specName with dots", specVolumeName: "test.spec.1", path: path.Join(tmpDir, fmt.Sprintf("plugins/kubernetes.io/csi/volumeDevices/%s/%s", "test.spec.1", "dev")), }, } for _, tc := range testCases { t.Logf("test case: %s", tc.name) pv := makeTestPV(tc.specVolumeName, 10, testDriver, testVol) spec := volume.NewSpecFromPersistentVolume(pv, pv.Spec.PersistentVolumeSource.CSI.ReadOnly) mapper, err := plug.NewBlockVolumeMapper( spec, &api.Pod{ObjectMeta: meta.ObjectMeta{UID: testPodUID, Namespace: testns}}, volume.VolumeOptions{}, ) if err != nil { t.Fatalf("Failed to make a new Mapper: %v", err) } csiMapper := mapper.(*csiBlockMapper) path, err := csiMapper.GetGlobalMapPath(spec) if err != nil { t.Errorf("mapper GetGlobalMapPath failed: %v", err) } if tc.path != path { t.Errorf("expecting path %s, got %s", tc.path, path) } } } func TestBlockMapperSetupDevice(t *testing.T) { plug, tmpDir := newTestPlugin(t)<|fim▁hole|> host := volumetest.NewFakeVolumeHostWithNodeName( tmpDir, fakeClient, nil, "fakeNode", ) plug.host = host pv := makeTestPV("test-pv", 10, testDriver, testVol) pvName := pv.GetName() nodeName := string(plug.host.GetNodeName()) spec := volume.NewSpecFromPersistentVolume(pv, pv.Spec.PersistentVolumeSource.CSI.ReadOnly) // MapDevice mapper, err := plug.NewBlockVolumeMapper( spec, &api.Pod{ObjectMeta: meta.ObjectMeta{UID: testPodUID, Namespace: testns}}, volume.VolumeOptions{}, ) if err != nil { t.Fatalf("failed to create new mapper: %v", err) } csiMapper := mapper.(*csiBlockMapper) csiMapper.csiClient = setupClient(t, true) attachID := getAttachmentName(csiMapper.volumeID, csiMapper.driverName, string(nodeName)) attachment := makeTestAttachment(attachID, nodeName, pvName) attachment.Status.Attached = true _, err = csiMapper.k8s.StorageV1beta1().VolumeAttachments().Create(attachment) if err != nil { t.Fatalf("failed to setup VolumeAttachment: %v", err) } t.Log("created attachement ", attachID) devicePath, err := csiMapper.SetUpDevice() if err != nil { t.Fatalf("mapper failed to SetupDevice: %v", err) } globalMapPath, err := csiMapper.GetGlobalMapPath(spec) if err != nil { t.Fatalf("mapper failed to GetGlobalMapPath: %v", err) } if devicePath != globalMapPath { t.Fatalf("mapper.SetupDevice returned unexpected 
path %s instead of %v", devicePath, globalMapPath) } vols := csiMapper.csiClient.(*fakeCsiDriverClient).nodeClient.GetNodeStagedVolumes() if vols[csiMapper.volumeID] != devicePath { t.Error("csi server may not have received NodePublishVolume call") } } func TestBlockMapperMapDevice(t *testing.T) { plug, tmpDir := newTestPlugin(t) defer os.RemoveAll(tmpDir) fakeClient := fakeclient.NewSimpleClientset() host := volumetest.NewFakeVolumeHostWithNodeName( tmpDir, fakeClient, nil, "fakeNode", ) plug.host = host pv := makeTestPV("test-pv", 10, testDriver, testVol) pvName := pv.GetName() nodeName := string(plug.host.GetNodeName()) spec := volume.NewSpecFromPersistentVolume(pv, pv.Spec.PersistentVolumeSource.CSI.ReadOnly) // MapDevice mapper, err := plug.NewBlockVolumeMapper( spec, &api.Pod{ObjectMeta: meta.ObjectMeta{UID: testPodUID, Namespace: testns}}, volume.VolumeOptions{}, ) if err != nil { t.Fatalf("failed to create new mapper: %v", err) } csiMapper := mapper.(*csiBlockMapper) csiMapper.csiClient = setupClient(t, true) attachID := getAttachmentName(csiMapper.volumeID, csiMapper.driverName, string(nodeName)) attachment := makeTestAttachment(attachID, nodeName, pvName) attachment.Status.Attached = true _, err = csiMapper.k8s.StorageV1beta1().VolumeAttachments().Create(attachment) if err != nil { t.Fatalf("failed to setup VolumeAttachment: %v", err) } t.Log("created attachement ", attachID) devicePath, err := csiMapper.SetUpDevice() if err != nil { t.Fatalf("mapper failed to SetupDevice: %v", err) } globalMapPath, err := csiMapper.GetGlobalMapPath(csiMapper.spec) if err != nil { t.Fatalf("mapper failed to GetGlobalMapPath: %v", err) } // Map device to global and pod device map path volumeMapPath, volName := csiMapper.GetPodDeviceMapPath() err = csiMapper.MapDevice(devicePath, globalMapPath, volumeMapPath, volName, csiMapper.podUID) if err != nil { t.Fatalf("mapper failed to GetGlobalMapPath: %v", err) } if _, err := os.Stat(filepath.Join(volumeMapPath, volName)); err != nil { if os.IsNotExist(err) { t.Errorf("mapper.MapDevice failed, volume path not created: %s", volumeMapPath) } else { t.Errorf("mapper.MapDevice failed: %v", err) } } pubs := csiMapper.csiClient.(*fakeCsiDriverClient).nodeClient.GetNodePublishedVolumes() if pubs[csiMapper.volumeID] != volumeMapPath { t.Error("csi server may not have received NodePublishVolume call") } } func TestBlockMapperTearDownDevice(t *testing.T) { plug, tmpDir := newTestPlugin(t) defer os.RemoveAll(tmpDir) fakeClient := fakeclient.NewSimpleClientset() host := volumetest.NewFakeVolumeHostWithNodeName( tmpDir, fakeClient, nil, "fakeNode", ) plug.host = host pv := makeTestPV("test-pv", 10, testDriver, testVol) spec := volume.NewSpecFromPersistentVolume(pv, pv.Spec.PersistentVolumeSource.CSI.ReadOnly) // save volume data dir := getVolumeDeviceDataDir(pv.ObjectMeta.Name, plug.host) if err := os.MkdirAll(dir, 0755); err != nil && !os.IsNotExist(err) { t.Errorf("failed to create dir [%s]: %v", dir, err) } if err := saveVolumeData( dir, volDataFileName, map[string]string{ volDataKey.specVolID: pv.ObjectMeta.Name, volDataKey.driverName: testDriver, volDataKey.volHandle: testVol, }, ); err != nil { t.Fatalf("failed to save volume data: %v", err) } unmapper, err := plug.NewBlockVolumeUnmapper(pv.ObjectMeta.Name, testPodUID) if err != nil { t.Fatalf("failed to make a new Unmapper: %v", err) } csiUnmapper := unmapper.(*csiBlockMapper) csiUnmapper.csiClient = setupClient(t, true) globalMapPath, err := csiUnmapper.GetGlobalMapPath(spec) if err != nil { 
t.Fatalf("unmapper failed to GetGlobalMapPath: %v", err) } err = csiUnmapper.TearDownDevice(globalMapPath, "/dev/test") if err != nil { t.Fatal(err) } // ensure csi client call and node unstaged vols := csiUnmapper.csiClient.(*fakeCsiDriverClient).nodeClient.GetNodeStagedVolumes() if _, ok := vols[csiUnmapper.volumeID]; ok { t.Error("csi server may not have received NodeUnstageVolume call") } // ensure csi client call and node unpblished pubs := csiUnmapper.csiClient.(*fakeCsiDriverClient).nodeClient.GetNodePublishedVolumes() if _, ok := pubs[csiUnmapper.volumeID]; ok { t.Error("csi server may not have received NodeUnpublishVolume call") } }<|fim▁end|>
defer os.RemoveAll(tmpDir) fakeClient := fakeclient.NewSimpleClientset()
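The three Go tests above walk the CSI block-volume lifecycle: create a VolumeAttachment, stage the device with SetUpDevice, publish it into the pod's device map path with MapDevice, and undo both in TearDownDevice. The sketch below models that stage/publish bookkeeping with a recording fake in the spirit of the tests' fakeCsiDriverClient; every name in it is illustrative, not part of the Kubernetes API.

// Minimal sketch of the lifecycle the tests assert on. `FakeDriver`
// records which volumes are staged/published, mirroring what
// GetNodeStagedVolumes()/GetNodePublishedVolumes() expose above.
use std::collections::HashMap;

#[derive(Default)]
struct FakeDriver {
    staged: HashMap<String, String>,    // volume id -> device path
    published: HashMap<String, String>, // volume id -> pod map path
}

impl FakeDriver {
    fn node_stage(&mut self, vol: &str, device_path: &str) {
        self.staged.insert(vol.to_string(), device_path.to_string());
    }
    fn node_publish(&mut self, vol: &str, map_path: &str) {
        self.published.insert(vol.to_string(), map_path.to_string());
    }
    fn node_unpublish(&mut self, vol: &str) {
        self.published.remove(vol);
    }
    fn node_unstage(&mut self, vol: &str) {
        self.staged.remove(vol);
    }
}

fn main() {
    let mut drv = FakeDriver::default();
    drv.node_stage("test-vol", "/plugins/kubernetes.io/csi/volumeDevices/spec-0/dev");
    drv.node_publish("test-vol", "/pods/uid/volumeDevices/spec-0");
    assert!(drv.staged.contains_key("test-vol"));    // SetUpDevice happened
    assert!(drv.published.contains_key("test-vol")); // MapDevice happened
    drv.node_unpublish("test-vol");
    drv.node_unstage("test-vol");
    assert!(drv.staged.is_empty() && drv.published.is_empty()); // TearDownDevice
}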
<|file_name|>test_session.py<|end_file_name|><|fim▁begin|>import datetime from mock import patch from pretend import stub from gurtel import session def test_annotates_request(): """Annotates request with ``session`` property.""" request = stub( cookies={}, app=stub(secret_key='secret', is_ssl=True, config={}), ) session.session_middleware(request, lambda req: None) assert request.session.secret_key == 'secret' @patch.object(session.JSONSecureCookie, 'save_cookie') def test_sets_cookie_on_response(mock_save_cookie): """Calls ``save_cookie`` on response.""" request = stub( cookies={}, app=stub(secret_key='secret', is_ssl=True, config={}), ) response = stub() session.session_middleware(request, lambda req: response) mock_save_cookie.assert_called_once_with( response, httponly=True, secure=True)<|fim▁hole|>@patch.object(session.JSONSecureCookie, 'save_cookie') @patch.object(session.timezone, 'now') def test_can_set_expiry(mock_now, mock_save_cookie): """Calls ``save_cookie`` on response with expiry date, if configured.""" request = stub( cookies={}, app=stub( secret_key='secret', is_ssl=True, config={'session.expiry_minutes': '1440'}, ), ) response = stub() mock_now.return_value = datetime.datetime(2013, 11, 22) session.session_middleware(request, lambda req: response) mock_save_cookie.assert_called_once_with( response, httponly=True, secure=True, expires=datetime.datetime(2013, 11, 23), )<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub mod cif_parser; use cif_parser::EdgeSign; pub mod profile_parser; use clingo::{ ast::Location, defaults::Non, AllModels, ClingoError, Control, ExternalError, FactBase, FunctionHandler, GenericControl, GenericSolveHandle, OptimalModels, Part, ShowType, SolveMode, Symbol, SymbolType, ToSymbol, }; use profile_parser::{Behavior, ProfileId}; /// This module contains the queries which can be asked to the model and data. pub mod encodings; use anyhow::Result; use encodings::*; use log::info; use serde::Serialize; use std::fmt; use thiserror::Error; type ControlWithFH = GenericControl<Non, Non, Non, MemberFH>; type SolveHandleWithFH<FH> = GenericSolveHandle<Non, Non, Non, FH, Non>; type Labelings = Vec<Prediction>; #[derive(Debug, Clone, Serialize)] pub struct Setting { pub os: bool, pub ep: bool, pub fp: bool, pub fc: bool, } impl Setting { pub fn to_json(&self) -> String { format!( "{{ \"depmat\":{}, \"elempath\":{}, \"forward-propagation\":{}, \"founded-constraints\":{}\n}}", !self.os, self.ep, self.fp, self.fc ) } } impl fmt::Display for Setting { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "\n## Settings\n")?; if !self.os { writeln!(f, "- Dependency matrix combines multiple states.")?; writeln!( f, "- An elementary path from an input must exist to explain changes." )?; } else { writeln!( f, "- All observed changes must be explained by a predecessor." )?; if self.ep { writeln!( f, "- An elementary path from an input must exist to explain changes." )?; } if self.fp { writeln!(f, "- 0-change must be explained.")?; } if self.fc { writeln!(f, "- All observed changes must be explained by an input.")?; } } write!(f, "") } } #[derive(Debug, Error)] #[error("IggyError: {msg}")] pub struct IggyError { pub msg: &'static str, } impl IggyError { fn new(msg: &'static str) -> IggyError { IggyError { msg } } } #[derive(Debug, Clone, ToSymbol, Serialize)] pub struct ObsELabel { start: NodeId, target: NodeId, sign: EdgeSign, } impl fmt::Display for ObsELabel { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.sign { EdgeSign::Plus => write!(f, "{} -> {}", self.start, self.target), EdgeSign::Minus => write!(f, "!{} -> {}", self.start, self.target), } } } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, ToSymbol, Serialize)] #[serde(untagged)] pub enum NodeId { Or(String), And(String), } impl fmt::Display for NodeId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { NodeId::Or(s) => write!(f, "{s}"), NodeId::And(s) => write!(f, "{s}"), } } } pub enum CheckResult { Consistent, Inconsistent(Vec<String>), } #[derive(Debug, Clone, Serialize)] pub enum RepairOp { AddEdge(ObsELabel), RemoveEdge(ObsELabel), FlipEdgeDirection(ObsELabel), FlipNodeSign { profile: ProfileId, node: NodeId, direction: Direction, }, NewInfluence { profile: ProfileId, target: NodeId, sign: EdgeSign, }, } impl fmt::Display for RepairOp { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { RepairOp::AddEdge(e) => write!(f, "add edge: {e}"), RepairOp::RemoveEdge(e) => write!(f, "remove edge: {e}"), RepairOp::FlipEdgeDirection(e) => write!(f, "flip direction: {e}"), RepairOp::FlipNodeSign { profile: _, node, direction: Direction::PlusToMinus, } => { write!(f, "flip {node}: + to -") } RepairOp::FlipNodeSign { profile: _, node, direction: Direction::PlusToZero, } => write!(f, "flip {node}: + to 0"), RepairOp::FlipNodeSign { profile: _, node, direction: Direction::ZeroToMinus, } => { write!(f, "flip {node}: 0 to -") } 
RepairOp::FlipNodeSign { profile: _, node, direction: Direction::ZeroToPlus, } => write!(f, "flip {node}: 0 to +"), RepairOp::FlipNodeSign { profile: _, node, direction: Direction::MinusToPlus, } => { write!(f, "flip {node}: - to +") } RepairOp::FlipNodeSign { profile: _, node, direction: Direction::MinusToZero, } => { write!(f, "flip {node}: - to 0") } RepairOp::FlipNodeSign { profile: _, node, direction: Direction::NotPlusToPlus, } => { write!(f, "flip {node}: notPlus to +") } RepairOp::FlipNodeSign { profile: _, node, direction: Direction::NotMinusToMinus, } => { write!(f, "flip {node}: notMinus to -") } RepairOp::NewInfluence { profile: _, target, sign: EdgeSign::Plus, } => { write!(f, "new increasing influence on {target}") } RepairOp::NewInfluence { profile: _, target, sign: EdgeSign::Minus, } => { write!(f, "new decreasing influence on {target}") } } } } pub fn compute_auto_inputs(graph: &FactBase, json: bool) -> Result<FactBase> { let new_inputs = guess_inputs(graph)?; let x = new_inputs .iter() .map(|y| into_node_id(y.arguments().unwrap()[0]).unwrap()); if json { let y: Vec<NodeId> = x.collect(); let serialized = serde_json::to_string(&y)?; println!(",\"Computed input nodes\":{serialized}"); } else { println!("\nComputed input nodes: {}", new_inputs.len()); for y in x { println!("- {y}"); } } Ok(new_inputs) } pub fn check_observations(profile: &FactBase) -> Result<CheckResult> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec![])?; // add a logic program to the base part ctl.add("base", &[], PRG_CONTRADICTORY_OBS)?; ctl.add_facts(profile)?; // ground the base part let part = Part::new("base", vec![])?; let parts = vec![part]; ctl.ground(&parts)?; // solve let mut handle = ctl.solve(SolveMode::YIELD, &[])?; handle.resume()?; match handle.model() { Ok(Some(model)) => { let atoms = model.symbols(ShowType::SHOWN)?; if atoms.is_empty() { // close the solve handle handle.close()?; return Ok(CheckResult::Consistent); } let mut v = vec![]; for atom in atoms { let node = atom .arguments()? .get(1) .ok_or_else(|| IggyError::new("Expected atom with at least two arguments."))? .arguments()? .get(0) .ok_or_else(|| IggyError::new("Expected function with at least one argument."))? .to_string(); match atom.name()? { "contradiction1" => { v.push(format!( "Simultaneous 0 and + behavior in node {node} is contradictory." )); } "contradiction2" => { v.push(format!( "Simultaneous 0 and - behavior in node {node} is contradictory." )); } "contradiction3" => { v.push(format!( "Simultaneous + and - behavior in node {node} is contradictory." )); } "contradiction4" => { v.push(format!( "Simultaneous notMinus and - behavior in node {node} is contradictory." )); } "contradiction5" => { v.push(format!( "Simultaneous notPlus and + behavior in node {node} is contradictory." )); } "contradiction6" => { v.push(format!( "Behavior -(decrease) while initial level is set to Min in node {node} is contradictory." )); } "contradiction7" => { v.push(format!( "Behavior +(increase) while initial level is set to Max in node {node} is contradictory." 
)); } _ => { v.push("Unknown contradiction in observations".to_string()); } } } Ok(CheckResult::Inconsistent(v)) } _ => panic!("Expected model!"), } } pub fn guess_inputs(graph: &FactBase) -> Result<FactBase> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec![])?; // add a logic program to the base part ctl.add("base", &[], PRG_GUESS_INPUTS)?; ctl.add_facts(graph)?; // ground the base part let part = Part::new("base", vec![])?; let parts = vec![part]; ctl.ground(&parts)?; // solve let mut handle = ctl.solve(SolveMode::YIELD, &[])?; handle.resume()?; let mut inputs = FactBase::new(); if let Ok(Some(model)) = handle.model() { let atoms = model.symbols(ShowType::SHOWN)?; if !atoms.is_empty() { for atom in atoms { inputs.insert(&atom); } } } // close the solve handle handle.close()?; Ok(inputs) } fn member(elem: Symbol, list: Symbol) -> Symbol { match list.symbol_type() { Ok(SymbolType::Function) => { let name = list.name().unwrap(); let arguments = list.arguments().unwrap(); if name == "conc" && arguments.len() == 2 && elem != arguments[1] { member(elem, arguments[0]) } else if name == "conc" && arguments.len() == 2 && elem == arguments[1] { Symbol::create_id("true", true).unwrap() } else if elem == list { Symbol::create_id("true", true).unwrap() } else { Symbol::create_id("false", true).unwrap() } } Ok(_) => { if elem == list { Symbol::create_id("true", true).unwrap() } else { Symbol::create_id("false", true).unwrap() } } Err(e) => panic!("symbol_type() returned error: {e}"), } } struct MemberFH; impl FunctionHandler for MemberFH { fn on_external_function( &mut self, _location: &Location, name: &str, arguments: &[Symbol], ) -> Result<Vec<Symbol>, ExternalError> { if name == "member" && arguments.len() == 2 { let element = arguments[0]; let list = arguments[1]; let res = member(element, list); Ok(vec![res]) } else { eprintln!("name: {name}"); Err(ExternalError { msg: "unknown external function!", }) } } } fn ground_and_solve(ctl: Control) -> Result<SolveHandleWithFH<MemberFH>> { // declare extern function handler let member_fh = MemberFH; // ground the base part let part = Part::new("base", vec![])?; let parts = vec![part]; let mut ctl = ctl.register_function_handler(member_fh); ctl.ground(&parts) .expect("ground with event handler did not work."); // solve let x = ctl.solve(SolveMode::YIELD, &[])?; Ok(x) } fn ground(ctl: Control) -> Result<ControlWithFH> { // declare extern function handler let member_fh = MemberFH; // ground the base part let part = Part::new("base", vec![])?; let parts = vec![part]; let mut ctl = ctl.register_function_handler(member_fh); ctl.ground(&parts)?; Ok(ctl) } fn cautious_consequences_optimal_models( handle: &mut SolveHandleWithFH<MemberFH>, ) -> Result<Vec<Symbol>> { let mut symbols = vec![]; loop { handle.resume()?; match handle.model()? { Some(model) => { if model.optimality_proven()? { symbols = model.symbols(ShowType::SHOWN)?; } } None => break, } } Ok(symbols) } fn get_optimum<FH: FunctionHandler>(handle: &mut SolveHandleWithFH<FH>) -> Result<Vec<i64>> { let mut last = vec![]; let mut found = false; loop { handle.resume()?; match handle.model()? { Some(model) => { if model.optimality_proven()? 
{ return Ok(model.cost()?); } else { found = true; last = model.cost()?; } } None => { if found { return Ok(last); } else { panic!("Error: no optimal model found!"); } } } } } /// return the minimal inconsistent cores pub fn get_minimal_inconsistent_cores( graph: &FactBase, profile: &FactBase, inputs: &FactBase, setting: &Setting, ) -> Result<Mics> { info!("Computing minimal inconsistent cores (mic\'s) ..."); // create a control object and pass command line arguments let mut ctl: Control = clingo::control(vec![ "0".to_string(), "--dom-mod=5,16".to_string(), "--heu=Domain".to_string(), "--enum-mode=domRec".to_string(), ])?; ctl.add_facts(graph)?; ctl.add_facts(profile)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_MICS)?; if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } // ground & solve let ctl = ground(ctl)?; Ok(Mics(ctl.all_models()?)) } pub struct Mics(AllModels<Non, Non, Non, MemberFH, Non>); impl Iterator for Mics { type Item = Vec<Symbol>; fn next(&mut self) -> Option<Self::Item> { match self.0.next() { None => None, Some(model) => { let extract = extract_mics(&model.symbols); match extract { Ok(x) => Some(x), _ => None, } } } } } /// returns the scenfit of data and model pub fn get_scenfit( graph: &FactBase, profile: &FactBase, inputs: &FactBase, setting: &Setting, ) -> Result<i64> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec![ "0".to_string(), "--opt-strategy=5".to_string(), "--opt-mode=optN".to_string(), ])?; ctl.add_facts(graph)?; ctl.add_facts(profile)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; // ground & solve let mut handle = ground_and_solve(ctl)?; Ok(get_optimum(&mut handle)?[0]) } /// returns a vector of scenfit labelings of data and model /// /// # Arguments: /// /// + number - maximal number of labelings pub fn get_scenfit_labelings( graph: &FactBase, profile: &FactBase, inputs: &FactBase, number: u32, setting: &Setting, ) -> Result<LabelsRepair> { info!("Compute scenfit labelings ..."); // create a control object and pass command line arguments let mut ctl = clingo::control(vec![ format!("{number}"), "--opt-strategy=5".to_string(), "--opt-mode=optN".to_string(), "--project".to_string(), ])?; ctl.add_facts(graph)?; ctl.add_facts(profile)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; ctl.add("base", &[], PRG_SHOW_ERRORS)?; ctl.add("base", &[], PRG_SHOW_LABELS)?; // ground & solve let ctl = ground(ctl)?; Ok(LabelsRepair(ctl.optimal_models()?)) } pub struct LabelsRepair(OptimalModels<Non, Non, Non, MemberFH, Non>); impl Iterator for LabelsRepair { type Item = (Vec<Prediction>, Vec<RepairOp>); fn next(&mut self) -> Option<Self::Item> { match self.0.next() { None => None, Some(model) => { let extract = 
extract_labels_repairs(&model.symbols); match extract { Ok(x) => Some(x), _ => None, } } } } } /// returns the mcos of data and model pub fn get_mcos( graph: &FactBase, profile: &FactBase, inputs: &FactBase, setting: &Setting, ) -> Result<i64> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec![ "0".to_string(), "--opt-strategy=5".to_string(), "--opt-mode=optN".to_string(), ])?; ctl.add_facts(graph)?; ctl.add_facts(profile)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_ADD_INFLUENCES)?; ctl.add("base", &[], PRG_MIN_ADDED_INFLUENCES)?; ctl.add("base", &[], PRG_KEEP_OBSERVATIONS)?; // ground & solve let mut handle = ground_and_solve(ctl)?; Ok(get_optimum(&mut handle)?[0]) } /// returns a vector of mcos labelings of data and model /// /// # Arguments: /// /// + number - maximal number of labelings pub fn get_mcos_labelings( graph: &FactBase, profile: &FactBase, inputs: &FactBase, number: u32, setting: &Setting, ) -> Result<LabelsRepair> { info!("Compute mcos labelings ..."); // create a control object and pass command line arguments let mut ctl = clingo::control(vec![ format!("{number}"), "--opt-strategy=5".to_string(), "--opt-mode=optN".to_string(), "--project".to_string(), ])?; ctl.add_facts(graph)?; ctl.add_facts(profile)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_ADD_INFLUENCES)?; ctl.add("base", &[], PRG_MIN_ADDED_INFLUENCES)?; ctl.add("base", &[], PRG_KEEP_OBSERVATIONS)?; ctl.add("base", &[], PRG_SHOW_REPAIRS)?; ctl.add("base", &[], PRG_SHOW_LABELS)?; // ground & solve let ctl = ground(ctl)?; Ok(LabelsRepair(ctl.optimal_models()?)) } pub fn get_predictions_under_mcos( graph: &FactBase, profile: &FactBase, inputs: &FactBase, setting: &Setting, ) -> Result<Predictions> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec![ "--opt-strategy=5".to_string(), "--opt-mode=optN".to_string(), "--enum-mode=cautious".to_string(), // format!("--opt-bound={opt}") ])?; ctl.add_facts(graph)?; ctl.add_facts(profile)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_ADD_INFLUENCES)?; ctl.add("base", &[], PRG_MIN_ADDED_INFLUENCES)?; ctl.add("base", &[], PRG_KEEP_OBSERVATIONS)?; if setting.os { ctl.add("base", &[], PRG_PREDICTIONS)?; } else { ctl.add("base", &[], PRG_PREDICTIONS_DM)?; } // ground & solve let mut handle = ground_and_solve(ctl)?; let model = cautious_consequences_optimal_models(&mut handle)?; extract_predictions(&model) } pub fn get_predictions_under_scenfit( graph: &FactBase, profile: &FactBase, inputs: &FactBase, setting: &Setting, ) -> Result<Predictions> { // create a control object and pass command line 
arguments let mut ctl = clingo::control(vec![ "--opt-strategy=5".to_string(), "--opt-mode=optN".to_string(), "--enum-mode=cautious".to_string(), // format!("--opt-bound={opt}") ])?; ctl.add_facts(graph)?; ctl.add_facts(profile)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; if setting.os { ctl.add("base", &[], PRG_PREDICTIONS)?; } else { ctl.add("base", &[], PRG_PREDICTIONS_DM)?; } // ground & solve let mut handle = ground_and_solve(ctl)?; let model = cautious_consequences_optimal_models(&mut handle)?; extract_predictions(&model) } fn extract_addeddy(symbols: &[Symbol]) -> Result<Symbol> { for a in symbols { if a.name()? == "addeddy" { let edge_end = a.arguments()?[0]; return Ok(Symbol::create_function("edge_end", &[edge_end], true)?); } } Err(IggyError::new("Expected addeddy(X) atom in the answer!").into()) } fn extract_addedges(symbols: &[Symbol]) -> Result<FactBase> { let mut ret = FactBase::new(); for a in symbols { if a.name()? == "addedge" { ret.insert(a); } } Ok(ret) } pub fn into_node_id(symbol: Symbol) -> Result<NodeId> { match symbol.name()? { "or" => { let arguments = symbol.arguments()?; let s = arguments[0].string()?; Ok(NodeId::Or(s.to_string())) } "and" => { let arguments = symbol.arguments()?; let s = arguments[0].string()?; Ok(NodeId::And(s.to_string())) } _ => { panic!("unmatched node type: {symbol}"); } } } pub fn into_behavior(symbol: Symbol) -> Result<Behavior> { match symbol.to_string().as_ref() { "1" => Ok(Behavior::Plus), "-1" => Ok(Behavior::Minus), "0" => Ok(Behavior::Zero), "notPlus" => Ok(Behavior::NotPlus), "notMinus" => Ok(Behavior::NotMinus), "change" => Ok(Behavior::Change), x => { panic!("Unexpected behavior: {x}"); } } } #[derive(Debug, Clone, Serialize)] pub enum Direction { PlusToZero, PlusToMinus, MinusToZero, MinusToPlus, ZeroToPlus, ZeroToMinus, NotMinusToMinus, NotPlusToPlus, } pub fn into_repair(symbol: &Symbol) -> Result<RepairOp> { match symbol.name()? 
{ "addedge" => { let arguments = symbol.arguments()?; let start = into_node_id(arguments[0])?; let target = into_node_id(arguments[1])?; let sign = match arguments[2].number() { Ok(1) => EdgeSign::Plus, Ok(-1) => EdgeSign::Minus, _ => panic!("unexpected EdgeSign"), }; Ok(RepairOp::AddEdge(ObsELabel { start, target, sign, })) } "remedge" => { let arguments = symbol.arguments()?; let start = into_node_id(arguments[0])?; let target = into_node_id(arguments[1])?; let sign = match arguments[2].number() { Ok(1) => EdgeSign::Plus, Ok(-1) => EdgeSign::Minus, _ => panic!("unexpected EdgeSign"), }; Ok(RepairOp::RemoveEdge(ObsELabel { start, target, sign, })) } "flip" => { let arguments = symbol.arguments()?; let start = into_node_id(arguments[0])?; let target = into_node_id(arguments[1])?; let sign = match arguments[2].number() { Ok(1) => EdgeSign::Plus, Ok(-1) => EdgeSign::Minus, _ => panic!("unexpected EdgeSign"), }; Ok(RepairOp::FlipEdgeDirection(ObsELabel { start, target, sign, })) } "flip_node_sign_Plus_to_0" => { let arguments = symbol.arguments()?; let profile = arguments[0].string()?.to_string(); let node = into_node_id(arguments[1])?; let direction = Direction::PlusToZero; Ok(RepairOp::FlipNodeSign { profile, node, direction, }) } "flip_node_sign_Plus_to_Minus" => { let arguments = symbol.arguments()?; let profile = arguments[0].string()?.to_string(); let node = into_node_id(arguments[1])?; let direction = Direction::PlusToMinus; Ok(RepairOp::FlipNodeSign { profile, node, direction, }) } "flip_node_sign_Minus_to_0" => { let arguments = symbol.arguments()?; let profile = arguments[0].string()?.to_string(); let node = into_node_id(arguments[1])?; let direction = Direction::MinusToZero; Ok(RepairOp::FlipNodeSign { profile, node, direction, }) } "flip_node_sign_Minus_to_Plus" => { let arguments = symbol.arguments()?; let profile = arguments[0].string()?.to_string(); let node = into_node_id(arguments[1])?; let direction = Direction::MinusToPlus; Ok(RepairOp::FlipNodeSign { profile, node, direction, }) } "flip_node_sign_0_to_Plus" => { let arguments = symbol.arguments()?; let profile = arguments[0].string()?.to_string(); let node = into_node_id(arguments[1])?; let direction = Direction::ZeroToPlus; Ok(RepairOp::FlipNodeSign { profile, node, direction, }) } "flip_node_sign_0_to_Minus" => { let arguments = symbol.arguments()?; let profile = arguments[0].string()?.to_string(); let node = into_node_id(arguments[1])?; let direction = Direction::ZeroToMinus; Ok(RepairOp::FlipNodeSign { profile, node, direction, }) } "flip_node_sign_notMinus_to_Minus" => { let arguments = symbol.arguments()?; let profile = arguments[0].string()?.to_string(); let node = into_node_id(arguments[1])?; let direction = Direction::NotMinusToMinus; Ok(RepairOp::FlipNodeSign { profile, node, direction, }) } "flip_node_sign_notPlus_to_Plus" => { let arguments = symbol.arguments()?; let profile = arguments[0].string()?.to_string(); let node = into_node_id(arguments[1])?; let direction = Direction::NotPlusToPlus; Ok(RepairOp::FlipNodeSign { profile, node, direction, }) } "new_influence" => { let arguments = symbol.arguments()?; let profile = arguments[0].string()?.to_string(); let target = into_node_id(arguments[1])?; let sign = match arguments[2].number() { Ok(1) => EdgeSign::Plus, Ok(-1) => EdgeSign::Minus, _ => panic!("unexpected EdgeSign"), }; Ok(RepairOp::NewInfluence { profile, target, sign, }) } _ => { panic!("unmatched repair type: {symbol}"); } } } /// only apply with elementary path consistency notion pub fn 
get_opt_add_remove_edges_greedy( graph: &FactBase, profiles: &FactBase, inputs: &FactBase, ) -> Result<(i64, i64, std::vec::Vec<FactBase>)> { let mut ctl = clingo::control(vec![ "--opt-strategy=5".to_string(), "--opt-mode=optN".to_string(), "--project".to_string(), ])?; ctl.add_facts(graph)?; ctl.add_facts(profiles)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; ctl.add("base", &[], PRG_FWD_PROP)?; ctl.add("base", &[], PRG_ELEM_PATH)?; ctl.add("base", &[], PRG_REMOVE_EDGES)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; ctl.add("base", &[], PRG_SHOW_REPAIRS)?; ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; // ground & solve let mut handle = ground_and_solve(ctl)?; let optima = get_optimum(&mut handle)?; let mut bscenfit = optima[0]; let mut brepscore = optima[1]; let mut fedges: Vec<(FactBase, i64, i64)> = vec![(FactBase::new(), bscenfit, brepscore)]; let mut tedges = vec![]; while let Some((oedges, oscenfit, orepscore)) = fedges.pop() { if oscenfit == 0 && oedges.len() * 2 >= (orepscore - 1) as usize { // early return let tuple = (oedges, oscenfit, orepscore); if !tedges.contains(&tuple) && oscenfit == bscenfit && orepscore == brepscore { tedges.push(tuple); } continue; } // extend till no better solution can be found let mut end = true; // assume this time it's the end let mut ctl = clingo::control(vec![ "--opt-strategy=5".to_string(), "--opt-mode=optN".to_string(), "--project".to_string(), ])?; ctl.add_facts(graph)?; ctl.add_facts(profiles)?; ctl.add_facts(inputs)?; ctl.add_facts(&oedges)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; ctl.add("base", &[], PRG_FWD_PROP)?; ctl.add("base", &[], PRG_ELEM_PATH)?; ctl.add("base", &[], PRG_REMOVE_EDGES)?; ctl.add("base", &[], PRG_BEST_ONE_EDGE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; ctl.add("base", &[], PRG_SHOW_ADD_EDGE_END)?; ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; // ground & solve let mut handle = ground_and_solve(ctl)?; // seach best edge end loop loop { handle.resume()?; match handle.model()? { Some(model) => { if model.optimality_proven()? { let symbols = model.symbols(ShowType::SHOWN)?; let cost = model.cost()?; let nscenfit = cost[0]; let nrepscore = cost[1]; if nscenfit < oscenfit || nrepscore < orepscore { // better score or more that 1 scenfit let nend = extract_addeddy(&symbols)?; let mut f_end = FactBase::new(); f_end.insert(&nend); let mut ctl2 = clingo::control(vec![ "--opt-strategy=5".to_string(), "--opt-mode=optN".to_string(), "--project".to_string(), ])?; ctl2.add_facts(graph)?; ctl2.add_facts(profiles)?; ctl2.add_facts(inputs)?; ctl2.add_facts(&oedges)?; ctl2.add_facts(&f_end)?; ctl2.add("base", &[], PRG_SIGN_CONS)?; ctl2.add("base", &[], PRG_BWD_PROP)?; ctl2.add("base", &[], PRG_FWD_PROP)?; ctl2.add("base", &[], PRG_ELEM_PATH)?; ctl2.add("base", &[], PRG_REMOVE_EDGES)?; ctl2.add("base", &[], PRG_BEST_EDGE_START)?; ctl2.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; ctl2.add("base", &[], PRG_SHOW_REPAIRS)?; ctl2.add("base", &[], PRG_ERROR_MEASURE)?; ctl2.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl2.add("base", &[], PRG_KEEP_INPUTS)?; // ground & solve let mut handle2 = ground_and_solve(ctl2)?; // seach best edge start loop loop { handle2.resume()?; match handle2.model()? { Some(model) => { if model.optimality_proven()? 
{ let symbols2 = model.symbols(ShowType::SHOWN)?; let n2scenfit = model.cost()?[0]; let n2repscore = model.cost()?[1]; if n2scenfit < oscenfit || n2repscore < orepscore { // better score or more that 1 scenfit if n2scenfit < bscenfit { bscenfit = n2scenfit; // update bscenfit brepscore = n2repscore; } if n2scenfit == bscenfit && n2repscore < brepscore { brepscore = n2repscore; } let nedges = extract_addedges(&symbols2)?; let tuple = (nedges.clone(), n2scenfit, n2repscore); if !fedges.contains(&tuple) { fedges.push(tuple); } end = false; } } } None => break, } } } if end { // could not get better let tuple = (oedges.clone(), oscenfit, orepscore); if !tedges.contains(&tuple) && oscenfit == bscenfit && orepscore == brepscore { tedges.push(tuple); } } } } None => break, } } } // take only the results with the best scenfit let mut redges = vec![]; for (tedges, tscenfit, trepscore) in tedges { if tscenfit == bscenfit && trepscore == brepscore { redges.push(tedges); } } Ok((bscenfit, brepscore, redges)) } /// only apply with elementary path consistency notion pub fn get_opt_repairs_add_remove_edges_greedy( graph: &FactBase, profiles: &FactBase, inputs: &FactBase, edges: &FactBase, scenfit: i64, repair_score: i64, max_solutions: u32, ) -> Result<Vec<std::vec::Vec<clingo::Symbol>>> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec![ max_solutions.to_string(), "--opt-strategy=5".to_string(), format!("--opt-mode=optN,{scenfit},{repair_score}"), "--project".to_string(), ])?; ctl.add_facts(graph)?; ctl.add_facts(profiles)?; ctl.add_facts(inputs)?; ctl.add_facts(edges)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; ctl.add("base", &[], PRG_FWD_PROP)?; ctl.add("base", &[], PRG_ELEM_PATH)?; ctl.add("base", &[], PRG_REMOVE_EDGES)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; ctl.add("base", &[], PRG_SHOW_REPAIRS)?; ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; // ground & solve let ctl = ground(ctl)?; let models = ctl.optimal_models()?; models .map(|model| extract_repairs(&model.symbols)) .collect() } pub fn get_opt_add_remove_edges( graph: &FactBase, profiles: &FactBase, inputs: &FactBase, setting: &Setting, ) -> Result<(i64, i64)> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec!["--opt-strategy=5".to_string()])?; ctl.add_facts(graph)?; ctl.add_facts(profiles)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { panic!( "error query.get_opt_add_remove_edges should not be called with elementary path constraint, use instead get_opt_add_remove_edges_greedy" ); } ctl.add("base", &[], PRG_REMOVE_EDGES)?; ctl.add("base", &[], PRG_ADD_EDGES)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; // ground & solve let part = Part::new("base", vec![])?; let parts = vec![part]; ctl.ground(&parts)?; // solve let mut handle = ctl.solve(SolveMode::YIELD, &[])?; let cost = get_optimum(&mut handle)?; Ok((cost[0], cost[1])) } pub fn get_opt_repairs_add_remove_edges( graph: &FactBase, profiles: &FactBase, inputs: &FactBase, scenfit: i64, repair_score: i64, 
max_solutions: u32, setting: &Setting, ) -> Result<Vec<std::vec::Vec<clingo::Symbol>>> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec![ max_solutions.to_string(), "--opt-strategy=5".to_string(), "--project".to_string(), format!("--opt-mode=optN,{scenfit},{repair_score}"), ])?; ctl.add_facts(graph)?; ctl.add_facts(profiles)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_REMOVE_EDGES)?; ctl.add("base", &[], PRG_ADD_EDGES)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; ctl.add("base", &[], PRG_SHOW_REPAIRS)?; // ground & solve let ctl = ground(ctl)?; let models = ctl.optimal_models()?; models .map(|model| extract_repairs(&model.symbols)) .collect() } pub fn get_opt_flip_edges( graph: &FactBase, profiles: &FactBase, inputs: &FactBase, setting: &Setting, ) -> Result<(i64, i64)> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec!["--opt-strategy=5".to_string()])?; ctl.add_facts(graph)?; ctl.add_facts(profiles)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_FLIP_EDGE_DIRECTIONS)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; // ground & solve let mut handle = ground_and_solve(ctl)?; let cost = get_optimum(&mut handle)?; Ok((cost[0], cost[1])) } pub fn get_opt_repairs_flip_edges( graph: &FactBase, profiles: &FactBase, inputs: &FactBase, scenfit: i64, repair_score: i64, max_solutions: u32, setting: &Setting, ) -> Result<Vec<std::vec::Vec<clingo::Symbol>>> { let mut ctl = clingo::control(vec![ max_solutions.to_string(), "--opt-strategy=5".to_string(), "--project".to_string(), format!("--opt-mode=optN,{scenfit},{repair_score}"), ])?; ctl.add_facts(graph)?; ctl.add_facts(profiles)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_FLIP_EDGE_DIRECTIONS)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; ctl.add("base", &[], PRG_SHOW_FLIP)?; // ground & solve let ctl = ground(ctl)?; let models = ctl.optimal_models()?; models.map(|model| extract_flips(&model.symbols)).collect() } pub fn get_opt_remove_edges( graph: &FactBase, profiles: &FactBase, inputs: &FactBase, setting: &Setting, ) -> Result<(i64, i64)> { // create a control object and pass command line arguments let mut ctl = clingo::control(vec!["--opt-strategy=5".to_string()])?; 
ctl.add_facts(graph)?; ctl.add_facts(profiles)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_REMOVE_EDGES)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; // ground & solve let mut handle = ground_and_solve(ctl)?; let cost = get_optimum(&mut handle)?; Ok((cost[0], cost[1])) } pub fn get_opt_repairs_remove_edges( graph: &FactBase, profiles: &FactBase, inputs: &FactBase, scenfit: i64, repair_score: i64, max_solutions: u32, setting: &Setting, ) -> Result<Vec<std::vec::Vec<clingo::Symbol>>> { let mut ctl = clingo::control(vec![ max_solutions.to_string(), "--opt-strategy=5".to_string(), "--project".to_string(), format!("--opt-mode=optN,{scenfit},{repair_score}"), ])?; ctl.add_facts(graph)?; ctl.add_facts(profiles)?; ctl.add_facts(inputs)?; ctl.add("base", &[], PRG_SIGN_CONS)?; ctl.add("base", &[], PRG_BWD_PROP)?; ctl.add("base", &[], PRG_ERROR_MEASURE)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_ERROR)?; ctl.add("base", &[], PRG_KEEP_INPUTS)?; if setting.os { ctl.add("base", &[], PRG_ONE_STATE)?; } if setting.fp { ctl.add("base", &[], PRG_FWD_PROP)?; } if setting.fc { ctl.add("base", &[], PRG_FOUNDEDNESS)?; } if setting.ep { ctl.add("base", &[], PRG_ELEM_PATH)?; } ctl.add("base", &[], PRG_REMOVE_EDGES)?; ctl.add("base", &[], PRG_MIN_WEIGHTED_REPAIRS)?; ctl.add("base", &[], PRG_SHOW_REPAIRS)?; // ground & solve let ctl = ground(ctl)?; let models = ctl.optimal_models()?; models .map(|model| extract_repairs(&model.symbols)) .collect() } /// Given a model this function returns a vector of mics fn extract_mics(symbols: &[Symbol]) -> Result<Vec<Symbol>> { let mut mics = vec![]; for symbol in symbols { match symbol.name()? { "active" => { let id = symbol.arguments()?[0]; mics.push(id); } _ => continue, } } Ok(mics) } /// Given a model this function returns a vector of pairs (node,label) /// and a vector of repair operations needed to make the labeling consistent fn extract_labels_repairs(symbols: &[Symbol]) -> Result<(Labelings, Vec<RepairOp>)> { let mut vlabels = vec![]; let mut err = vec![]; for symbol in symbols { match symbol.name()? { "vlabel" => { let id = symbol.arguments()?[1]; // only return or nodes if id.name()? 
== "or" {<|fim▁hole|> behavior, }); } } "flip_node_sign_Plus_to_0" => { err.push(into_repair(symbol)?); } "flip_node_sign_Plus_to_Plus" => { err.push(into_repair(symbol)?); } "flip_node_sign_Minus_to_0" => { err.push(into_repair(symbol)?); } "flip_node_sign_Minus_to_Plus" => { err.push(into_repair(symbol)?); } "flip_node_sign_0_to_Plus" => { err.push(into_repair(symbol)?); } "flip_node_sign_0_to_Minus" => { err.push(into_repair(symbol)?); } "flip_node_sign_notPlus_to_Plus" => { err.push(into_repair(symbol)?); } "flip_node_sign_notMinus_to_Minus" => { err.push(into_repair(symbol)?); } "addedge" => { err.push(into_repair(symbol)?); } "remedge" => { err.push(into_repair(symbol)?); } "new_influence" => { err.push(into_repair(symbol)?); } _ => continue, } } Ok((vlabels, err)) } /// Given a model this function returns a vector of symbols /// denoting repair operations needed to make the labeling consistent fn extract_repairs(symbols: &[Symbol]) -> Result<Vec<Symbol>> { let mut rep = vec![]; for symbol in symbols { match symbol.name()? { "addedge" => { rep.push(*symbol); } "remedge" => { rep.push(*symbol); } "flip" => { rep.push(*symbol); } "new_influence" => { rep.push(*symbol); } _ => continue, } } Ok(rep) } /// Given a model this function returns a vector of symbols /// denoting edge flip operations needed to make the labeling consistent fn extract_flips(symbols: &[Symbol]) -> Result<Vec<Symbol>> { let mut rep = vec![]; for symbol in symbols { match symbol.name()? { "flip" => { rep.push(*symbol); } _ => continue, } } Ok(rep) } type Predictions = Vec<Prediction>; #[derive(Debug, Clone, Serialize)] pub struct Prediction { pub node: String, pub behavior: Behavior, } impl fmt::Display for Prediction { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{} = {}", self.node, self.behavior) } } /// Given a model this function returns a Vector of Predictions fn extract_predictions(symbols: &[Symbol]) -> Result<Predictions> { let mut predictions = Vec::new(); let mut not_plus = Vec::new(); let mut not_minus = Vec::new(); let mut change = Vec::new(); for symbol in symbols { match symbol.name()? { "pred" => { let id = symbol.arguments()?[1]; // only return or nodes if id.name()? 
== "or" { match symbol.arguments()?[2].to_string().as_ref() { "1" => { predictions.push(Prediction { node: id.arguments()?[0].string()?.to_string(), behavior: Behavior::Plus, }); } "-1" => { predictions.push(Prediction { node: id.arguments()?[0].string()?.to_string(), behavior: Behavior::Minus, }); } "0" => { predictions.push(Prediction { node: id.arguments()?[0].string()?.to_string(), behavior: Behavior::Zero, }); } "notPlus" => { not_plus.push(id.arguments()?[0].string()?.to_string()); } "notMinus" => { not_minus.push(id.arguments()?[0].string()?.to_string()); } "change" => { change.push(id.arguments()?[0].string()?.to_string()); } x => { panic!("Unexpected predicted behavior: {x}"); } } } } _ => { panic!("Unexpected predicate: {symbol}"); } } } for pred in &predictions { if let Some(index) = not_minus.iter().position(|x| *x == *pred.node) { not_minus.remove(index); } if let Some(index) = not_plus.iter().position(|x| *x == *pred.node) { not_plus.remove(index); } if let Some(index) = change.iter().position(|x| *x == *pred.node) { change.remove(index); } } for node in not_minus { predictions.push(Prediction { node, behavior: Behavior::NotMinus, }); } for node in not_plus { predictions.push(Prediction { node, behavior: Behavior::NotPlus, }); } for node in change { predictions.push(Prediction { node, behavior: Behavior::Change, }); } Ok(predictions) }<|fim▁end|>
let behavior = into_behavior(symbol.arguments()?[2])?; vlabels.push(Prediction { node: id.arguments()?[0].string()?.to_string(),
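The lib.rs sample above exposes its queries (check_observations, guess_inputs, get_scenfit, ...) as plain functions over clingo FactBases, with a Setting struct choosing which ASP modules get grounded. Below is a hedged end-to-end sketch of how a caller might chain them; it assumes the library is published as a crate (called `iggy` here) and that the graph and profile facts were already produced by its own parsers, neither of which is shown in this sample.

// Hypothetical driver for the query API defined above; crate name and
// fact sources are assumptions, the function signatures are the sample's.
use anyhow::Result;
use clingo::FactBase;
use iggy::{check_observations, get_scenfit, guess_inputs, CheckResult, Setting};

fn consistency_report(graph: &FactBase, profile: &FactBase) -> Result<()> {
    // The flags mirror the ASP modules toggled inside get_scenfit above.
    let setting = Setting { os: true, ep: false, fp: true, fc: true };

    // 1. Reject self-contradictory observations before touching the model.
    if let CheckResult::Inconsistent(reasons) = check_observations(profile)? {
        for r in &reasons {
            println!("contradiction: {r}");
        }
        return Ok(());
    }

    // 2. Treat nodes without regulators as inputs.
    let inputs = guess_inputs(graph)?;

    // 3. scenfit == 0 means model and data are mutually consistent.
    let scenfit = get_scenfit(graph, profile, &inputs, &setting)?;
    println!("scenfit: {scenfit}");
    Ok(())
}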
<|file_name|>event-handler.js<|end_file_name|><|fim▁begin|>/** * socket.io application object & request wrapper * * The first few requests when hitting the site will be pre-cached, as the socket won't * have had time to connect yet. Socket.io already does this, but we want to fallback to * AJAX instead of waiting however long for a socket to become available. If there's no * response from the socket server in a given time then AJAX will be run to load the initial * page content instead. * * This kind of mechanism should respond better under high load (if/when open socket connections * limits start to become a problem). It also allows us to easily disable the socket server * if we're experiencing any unforseen load issues. * * @package StopTheSpies Website * @author Sam Pospischil <[email protected]> * @since 2014-08-24 */ window.io || (io = {}); (function($, io) { var __CONNECTED__ = false, __LOADING__ = setTimeout(runBufferedRequests, STS.options.SOCKET_CONNECT_TIMEOUT), PRE_LOAD_CALLS = [], START = new Date(); function runBufferedRequests() { clearTimeout(__LOADING__); __LOADING__ = false; if (!PRE_LOAD_CALLS.length) { return; } for (var i = 0, l = PRE_LOAD_CALLS.length; i < l; ++i) { STS.app.api.apply(PRE_LOAD_CALLS[i][0], PRE_LOAD_CALLS[i][1]); } PRE_LOAD_CALLS = []; } /* if (STS.options.ENABLE_REALTIME) { var opts = undefined; if (STS.options.API_SOCKET_BASEURL) { opts = { resource : STS.options.API_SOCKET_BASEURL }; } io = io.connect(STS.options.API_BASE_URL, opts); io.on('connect', function() { __CONNECTED__ = true; runBufferedRequests(); }); io.on('disconnect', function() { __CONNECTED__ = false; }); //---------------------------------------------------------------------------- io.on('stats:update', function(stats) { STS.events.onStatsUpdate(stats); }); io.on('shares:update', function(shares) { STS.events.onSharesLoad(shares); }); io.on('tweets:updateCount', function(count) { $('.tweets-support-total').numberSpinner('set', count); }); io.on('l:views', function(reps) { STS.events.onLegislatorStatsIncrement(reps, 'views'); notifyLegislatorMap(reps, 'views'); }); io.on('l:calls', function(reps) { STS.events.onLegislatorStatsIncrement(reps, 'calls'); notifyLegislatorMap(reps, 'calls'); }); io.on('l:emails', function(reps) { STS.events.onLegislatorStatsIncrement(reps, 'emails'); notifyLegislatorMap(reps, 'emails'); }); io.on('l:tweets', function(reps) { STS.events.onLegislatorStatsIncrement(reps, 'tweets'); notifyLegislatorMap(reps, 'tweets'); }); io.on('l:facebooks', function(reps) { STS.events.onLegislatorStatsIncrement(reps, 'facebooks'); notifyLegislatorMap(reps, 'facebooks'); }); } */<|fim▁hole|> // reusable event handlers function notifyLegislatorMap(reps, event) { var colors = STS.CampaignMap.EVENT_COLORS; var color = colors[event], count, ward, rep; for (rep in reps) { if (!reps.hasOwnProperty(rep)) continue; for (var i = 0, l = STS.TOTAL_MAPS_COUNT; i < l; ++i) { ward = STS.CampaignMap.getWardForMember(i, rep); if (!ward) { continue; // senators :TODO: show some other way } count = reps[rep]; STS.anim.map.notifyElectorate(ward, color, count); } } } window._testMapPing = notifyLegislatorMap; //---------------------------------------------------------------------------- // EXPORTS STS.app = io; STS.app.api = function(ioEvent, ajaxUrl, data, onComplete, onError) { data || (data = {}); if (!__CONNECTED__ && __LOADING__) { PRE_LOAD_CALLS.push([this, arguments]); return; } if (__CONNECTED__ && ioEvent) { io.emit(ioEvent, data, onComplete); } else if (ajaxUrl) { var method = "GET"; if 
(ajaxUrl.url) { method = ajaxUrl.method; ajaxUrl = ajaxUrl.url; } $.ajax(ajaxUrl, { method: method, data: data, success: onComplete || function() {}, error: onError || function() {}, cache : true // :NOTE: we'll always cache AJAX requests, and never cache socket ones. If the socket server is having issues then stale info is probably better to avoid load. }); } }; })(jQuery, io);<|fim▁end|>
//----------------------------------------------------------------------------
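The event-handler.js sample above queues API calls made before the socket is connected (PRE_LOAD_CALLS) and replays them once the socket connects, or after SOCKET_CONNECT_TIMEOUT fires and the code falls back to AJAX. The same buffer-then-flush pattern, as a dependency-free Rust sketch with the transport stubbed out by a print; it illustrates the mechanism only, not the original API.

// Requests issued before the transport is ready are queued and replayed
// once `flush` runs from the connect event or the timeout fallback.
use std::collections::VecDeque;

struct Api {
    connected: bool,
    pending: VecDeque<String>, // queued request payloads (PRE_LOAD_CALLS)
}

impl Api {
    fn new() -> Self {
        Api { connected: false, pending: VecDeque::new() }
    }

    fn request(&mut self, payload: &str) {
        if self.connected {
            Self::send(payload);
        } else {
            self.pending.push_back(payload.to_string()); // buffer it
        }
    }

    // Called from the connect event or the AJAX-fallback timeout,
    // playing the role of runBufferedRequests() above.
    fn flush(&mut self) {
        self.connected = true;
        while let Some(p) = self.pending.pop_front() {
            Self::send(&p);
        }
    }

    fn send(payload: &str) {
        println!("sent: {payload}"); // stand-in for io.emit / $.ajax
    }
}

fn main() {
    let mut api = Api::new();
    api.request("stats:update");  // queued: transport not up yet
    api.request("shares:update"); // queued
    api.flush();                  // connect (or timeout) replays both
}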
<|file_name|>ble-uart.js<|end_file_name|><|fim▁begin|>/* Noble cread UART service example This example uses Sandeep Mistry's noble library for node.js to read and write from Bluetooth LE characteristics. It looks for a UART characteristic based on a proprietary UART service by Nordic Semiconductor. You can see this service implemented in Adafruit's BLEFriend library. created 30 Nov 2015 by Tom Igoe */ var noble = require('noble'); //noble library var util = require('util'); // utilities library // make an instance of the eventEmitter library: var EventEmitter = require('events').EventEmitter; // constructor function, so you can call new BleUart(): var BleUart = function (uuid) {<|fim▁hole|> self.connected = false; // whether the remote peripheral's connected self.peripheral; // the remote peripheral as an object EventEmitter.call(self); // make a copy of EventEmitter so you can emit events if (uuid) { // if the constructor was called with a different UUID, service = uuid; // then set that as the service to search for } // The scanning function: function scan(state) { if (state === 'poweredOn') { // if the radio's on, scan for this service noble.startScanning([service], false); } // emit a 'scanning' event: self.emit('scanning', state); } // the connect function: self.connect = function(peripheral) { self.peripheral = peripheral; peripheral.connect(); // start connection attempts // the connect function. This is local to the discovery function // because it needs to know the peripheral to discover services: function discover() { // once you know you have a peripheral with the desired // service, you can stop scanning for others: noble.stopScanning(); // get the service you want on this peripheral: peripheral.discoverServices([service],explore); } // called only when the peripheral has the service you're looking for: peripheral.on('connect', discover); // when a peripheral disconnects, run disconnect: peripheral.on('disconnect', self.disconnect); } // the services and characteristics exploration function: // once you're connected, this gets run: function explore(error, services) { // this gets run by the for-loop at the end of the // explore function, below: function getCharacteristics(error, characteristics) { for (var c in characteristics) { // loop over the characteristics if (characteristics[c].notify) { // if one has the notify property receive = characteristics[c]; // then it's the receive characteristic receive.notify(true); // turn on notifications // whenever a notify event happens, get the result. // this handles repeated notifications: receive.on('data', function(data, notification) { if (notification) { // if you got a notification self.emit('data', String(data)); // emit a data event } }); } if (characteristics[c].write) { // if a characteristic has a write property transmit = characteristics[c]; // then it's the transmit characteristic } } // end of getCharacteristics() // if you've got a valid transmit and receive characteristic, // then you're truly connected. Emit a connected event: if (transmit && receive) { self.connected = true; self.emit('connected', self.connected); } } // iterate over the services discovered. If one matches // the UART service, look for its characteristics: for (var s in services) { if (services[s].uuid === service) { services[s].discoverCharacteristics([], getCharacteristics); return; } } } // the BLE write function. 
If there's a valid transmit characteristic, // then write data out to it as a Buffer: self.write = function(data) { if (transmit) { transmit.write(new Buffer(data)); } } // the BLE disconnect function: self.disconnect = function() { self.connected = false; } // when the radio turns on, start scanning: noble.on('stateChange', scan); // if you discover a peripheral with the appropriate service, connect: noble.on('discover', self.connect); } util.inherits(BleUart, EventEmitter); // BleUart inherits all the EventEmitter properties module.exports = BleUart; // export BleUart<|fim▁end|>
var service = '6e400001b5a3f393e0a9e50e24dcca9e'; // the service you want var receive, transmit; // transmit and receive BLE characteristics var self = this; // reference to the instance of BleUart
<|file_name|>local_data_priv.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[allow(missing_doc)]; use cast; use cmp::Eq; use libc; use prelude::*; use task::rt; use local_data::LocalDataKey; use super::rt::rust_task; use rt::task::{Task, LocalStorage}; pub enum Handle { OldHandle(*rust_task), NewHandle(*mut LocalStorage) } impl Handle { pub fn new() -> Handle { use rt::{context, OldTaskContext}; use rt::local::Local; unsafe { match context() { OldTaskContext => { OldHandle(rt::rust_get_task()) } _ => { let task = Local::unsafe_borrow::<Task>(); NewHandle(&mut (*task).storage) } } } } } pub trait LocalData { } impl<T: 'static> LocalData for @T { } impl Eq for @LocalData { fn eq(&self, other: &@LocalData) -> bool { unsafe { let ptr_a: &(uint, uint) = cast::transmute(self); let ptr_b: &(uint, uint) = cast::transmute(other); return ptr_a == ptr_b; } } fn ne(&self, other: &@LocalData) -> bool { !(*self).eq(other) } } // If TLS is used heavily in future, this could be made more efficient with a // proper map. type TaskLocalElement = (*libc::c_void, *libc::c_void, @LocalData); // Has to be a pointer at outermost layer; the foreign call returns void *. type TaskLocalMap = @mut ~[Option<TaskLocalElement>]; fn cleanup_task_local_map(map_ptr: *libc::c_void) { unsafe { assert!(!map_ptr.is_null()); // Get and keep the single reference that was created at the // beginning. let _map: TaskLocalMap = cast::transmute(map_ptr); // All local_data will be destroyed along with the map. } } // Gets the map from the runtime. Lazily initialises if not done so already. unsafe fn get_local_map(handle: Handle) -> TaskLocalMap { match handle { OldHandle(task) => get_task_local_map(task), NewHandle(local_storage) => get_newsched_local_map(local_storage) } } unsafe fn get_task_local_map(task: *rust_task) -> TaskLocalMap { extern fn cleanup_task_local_map_extern_cb(map_ptr: *libc::c_void) { cleanup_task_local_map(map_ptr); } // Relies on the runtime initialising the pointer to null. // Note: The map's box lives in TLS invisibly referenced once. Each time // we retrieve it for get/set, we make another reference, which get/set // drop when they finish. No "re-storing after modifying" is needed. 
let map_ptr = rt::rust_get_task_local_data(task); if map_ptr.is_null() { let map: TaskLocalMap = @mut ~[]; // NB: This bumps the ref count before converting to an unsafe pointer, // keeping the map alive until TLS is destroyed rt::rust_set_task_local_data(task, cast::transmute(map)); rt::rust_task_local_data_atexit(task, cleanup_task_local_map_extern_cb); map } else { let map = cast::transmute(map_ptr); let nonmut = cast::transmute::<TaskLocalMap, @~[Option<TaskLocalElement>]>(map); cast::bump_box_refcount(nonmut); map } } unsafe fn get_newsched_local_map(local: *mut LocalStorage) -> TaskLocalMap { match &mut *local { &LocalStorage(map_ptr, Some(_)) => { assert!(map_ptr.is_not_null()); let map = cast::transmute(map_ptr); let nonmut = cast::transmute::<TaskLocalMap, @~[Option<TaskLocalElement>]>(map); cast::bump_box_refcount(nonmut); return map; } &LocalStorage(ref mut map_ptr, ref mut at_exit) => { assert!((*map_ptr).is_null()); let map: TaskLocalMap = @mut ~[]; *map_ptr = cast::transmute(map); let at_exit_fn: ~fn(*libc::c_void) = |p|cleanup_task_local_map(p); *at_exit = Some(at_exit_fn); return map; } } } unsafe fn key_to_key_value<T: 'static>(key: LocalDataKey<T>) -> *libc::c_void { // Keys are closures, which are (fnptr,envptr) pairs. Use fnptr. // Use reinterpret_cast -- transmute would leak (forget) the closure. let pair: (*libc::c_void, *libc::c_void) = cast::transmute_copy(&key); pair.first() } // If returning Some(..), returns with @T with the map's reference. Careful! unsafe fn local_data_lookup<T: 'static>( map: TaskLocalMap, key: LocalDataKey<T>) -> Option<(uint, *libc::c_void)> { let key_value = key_to_key_value(key); let map_pos = (*map).iter().position(|entry| match *entry { Some((k,_,_)) => k == key_value, None => false } ); do map_pos.map |index| { // .get() is guaranteed because of "None { false }" above. let (_, data_ptr, _) = (*map)[*index].get(); (*index, data_ptr) } } unsafe fn local_get_helper<T: 'static>( handle: Handle, key: LocalDataKey<T>, do_pop: bool) -> Option<@T> { let map = get_local_map(handle); // Interpreturn our findings from the map do local_data_lookup(map, key).map |result| { // A reference count magically appears on 'data' out of thin air. It // was referenced in the local_data box, though, not here, so before // overwriting the local_data_box we need to give an extra reference. // We must also give an extra reference when not removing. let (index, data_ptr) = *result; let data: @T = cast::transmute(data_ptr); cast::bump_box_refcount(data); if do_pop { map[index] = None; } data } } pub unsafe fn local_pop<T: 'static>( handle: Handle, key: LocalDataKey<T>) -> Option<@T> { local_get_helper(handle, key, true) }<|fim▁hole|> local_get_helper(handle, key, false) } pub unsafe fn local_set<T: 'static>( handle: Handle, key: LocalDataKey<T>, data: @T) { let map = get_local_map(handle); // Store key+data as *voids. Data is invisibly referenced once; key isn't. let keyval = key_to_key_value(key); // We keep the data in two forms: one as an unsafe pointer, so we can get // it back by casting; another in an existential box, so the reference we // own on it can be dropped when the box is destroyed. The unsafe pointer // does not have a reference associated with it, so it may become invalid // when the box is destroyed. let data_ptr = *cast::transmute::<&@T, &*libc::c_void>(&data); let data_box = @data as @LocalData; // Construct new entry to store in the map. let new_entry = Some((keyval, data_ptr, data_box)); // Find a place to put it. 
match local_data_lookup(map, key) { Some((index, _old_data_ptr)) => { // Key already had a value set, _old_data_ptr, whose reference // will get dropped when the local_data box is overwritten. map[index] = new_entry; } None => { // Find an empty slot. If not, grow the vector. match (*map).iter().position(|x| x.is_none()) { Some(empty_index) => { map[empty_index] = new_entry; } None => { map.push(new_entry); } } } } } pub unsafe fn local_modify<T: 'static>( handle: Handle, key: LocalDataKey<T>, modify_fn: &fn(Option<@T>) -> Option<@T>) { // Could be more efficient by doing the lookup work, but this is easy. let newdata = modify_fn(local_pop(handle, key)); if newdata.is_some() { local_set(handle, key, newdata.unwrap()); } }<|fim▁end|>
pub unsafe fn local_get<T: 'static>(
    handle: Handle, key: LocalDataKey<T>) -> Option<@T> {
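
# ---------------------------------------------------------------------------
# Editor's sketch (not a dataset row): the rows in this dump appear to follow
# a fill-in-the-middle layout, where the prompt is prefix + hole marker +
# suffix and the completion is the text that belongs in the hole. Under that
# assumption, the source file can be reassembled as below. Marker spellings
# are copied from the rows, written here with \u2581 escapes so the snippet
# does not itself read as a row; the toy sample_prompt is ours.
FIM_BEGIN = "<|fim\u2581begin|>"
FIM_HOLE = "<|fim\u2581hole|>"
FIM_END = "<|fim\u2581end|>"

def reassemble(prompt, completion):
    body = prompt.split(FIM_BEGIN, 1)[-1]        # drop the file-name header
    body = body.rpartition(FIM_END)[0] or body   # drop the closing marker
    prefix, _, suffix = body.partition(FIM_HOLE)
    return prefix + completion + suffix

sample_prompt = FIM_BEGIN + "fn main() {" + FIM_HOLE + "}" + FIM_END
print(reassemble(sample_prompt, ' println!("hi"); '))
# -> fn main() { println!("hi"); }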
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * Copyright (c) 2020 Choko ([email protected]) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ import './organization'; import './storage';<|fim▁end|>
* MIT License *
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # girc documentation build configuration file, created by # sphinx-quickstart on Fri Jul 10 20:20:32 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os import shlex # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('..')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'girc' copyright = '2015, Daniel Oaks' author = 'Daniel Oaks' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.1' # The full version, including alpha/beta/rc tags. release = '0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = False # -- Options for HTML output ---------------------------------------------- import sphinx_py3doc_enhanced_theme html_theme = "sphinx_py3doc_enhanced_theme" html_theme_path = [sphinx_py3doc_enhanced_theme.get_html_theme_path()] html_theme_options = { 'bodyfont': '\'Lucida Grande\', Arial, sans-serif', 'headfont': '\'Lucida Grande\', Arial, sans-serif', 'footerbgcolor': 'white', 'footertextcolor': '#555555', 'relbarbgcolor': 'white', 'relbartextcolor': '#666666', 'relbarlinkcolor': '#444444', 'sidebarbgcolor': 'white', 'sidebartextcolor': '#444444', 'sidebarlinkcolor': '#444444', 'bgcolor': 'white', 'textcolor': '#222222', 'linkcolor': '#0072AA', 'visitedlinkcolor': '#6363bb', 'headtextcolor': '#1a1a1a', 'headbgcolor': 'white', 'headlinkcolor': '#aaaaaa', 'extrastyling': False, } # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated.<|fim▁hole|> # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. 
# Now only 'ja' uses this config value #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'gircdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'girc.tex', 'girc Documentation', 'Daniel Oaks', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'girc', 'girc Documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'girc', 'girc Documentation', author, 'girc', 'A modern Python IRC library for Python 3.4, based on asyncio.', 'Internet Relay Chat'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
#texinfo_no_detailmenu = False # taken from http://stackoverflow.com/questions/7250659/ import sys from os.path import basename try: from StringIO import StringIO except ImportError: from io import StringIO from sphinx.util.compat import Directive from docutils import nodes, statemachine class ExecDirective(Directive): """Execute the specified python code and insert the output into the document""" has_content = True def run(self): oldStdout, sys.stdout = sys.stdout, StringIO() tab_width = self.options.get('tab-width', self.state.document.settings.tab_width) source = self.state_machine.input_lines.source(self.lineno - self.state_machine.input_offset - 1) try: exec('\n'.join(self.content)) text = sys.stdout.getvalue() lines = statemachine.string2lines(text, tab_width, convert_whitespace=True) self.state_machine.insert_input(lines, source) return [] except Exception: return [nodes.error(None, nodes.paragraph(text = "Unable to execute python code at %s:%d:" % (basename(source), self.lineno)), nodes.paragraph(text = str(sys.exc_info()[1])))] finally: sys.stdout = oldStdout def setup(app): app.add_directive('exec', ExecDirective)<|fim▁end|>
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True
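
# Editor's sketch (not a dataset row): the `exec` directive defined at the end
# of the conf.py row above works by swapping sys.stdout for a StringIO buffer,
# exec()-ing the directive body, and splicing the captured output back into
# the document. The same capture trick in isolation (capture_exec is our name):
import sys
from io import StringIO

def capture_exec(code):
    old_stdout, sys.stdout = sys.stdout, StringIO()
    try:
        exec(code)
        return sys.stdout.getvalue()
    finally:
        sys.stdout = old_stdout

print(capture_exec("print(2 + 2)"), end="")  # -> 4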
<|file_name|>use_undefined_result.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> peg::parser!(grammar foo() for str { rule foo() = "asdf" rule bar() -> u32 = x:foo() { 0 } //~ ERROR using result of rule `foo`, which does not return a value }); fn main() {}<|fim▁end|>
extern crate peg;
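
# Editor's sketch (not a dataset row): use_undefined_result.rs above is a
# compile-fail test; the `//~ ERROR ...` comment pins the diagnostic that
# rustc must report on that line. A tiny scanner for such annotations (the
# annotation format comes from the row, the scanner itself is ours):
def expected_errors(source, marker="//~ ERROR"):
    hits = []
    for lineno, line in enumerate(source.splitlines(), 1):
        if marker in line:
            hits.append((lineno, line.split(marker, 1)[1].strip()))
    return hits

src = "rule bar() -> u32 = x:foo() { 0 } //~ ERROR using result of rule `foo`"
print(expected_errors(src))  # -> [(1, 'using result of rule `foo`')]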
<|file_name|>schema.py<|end_file_name|><|fim▁begin|>import inspect import logging import threading from django.apps import apps from django.conf import settings from django.core.cache import cache from django.core.exceptions import ImproperlyConfigured from django.db import connection from django.db.migrations.operations.base import Operation from django.utils.translation import lazy from .exceptions import TemplateSchemaActivation, SchemaNotFound from .signals import find_schema LOGGER = logging.getLogger(__name__) LOGGER.addHandler(logging.NullHandler()) _thread_locals = threading.local() def get_schema_model(): """ Return the class that is currently set as the schema model. """<|fim▁hole|> except AttributeError: raise ImproperlyConfigured("BOARDINGHOUSE_SCHEMA_MODEL is not set: is 'boardinghouse' in your INSTALLED_APPS?") except ValueError: raise ImproperlyConfigured("BOARDINGHOUSE_SCHEMA_MODEL must be of the form 'app_label.model_name'") except LookupError: raise ImproperlyConfigured( "BOARDINGHOUSE_SCHEMA_MODEL refers to model '{0!s}' that has not been installed".format( settings.BOARDINGHOUSE_SCHEMA_MODEL)) def _get_search_path(): cursor = connection.cursor() cursor.execute('SELECT current_schema()') search_path = cursor.fetchone()[0] cursor.close() return search_path def _set_search_path(search_path): cursor = connection.cursor() cursor.execute('SET search_path TO %s,{0}'.format(settings.PUBLIC_SCHEMA), [search_path]) cursor.close() def _schema_exists(schema_name, cursor=None): if cursor: cursor.execute('''SELECT schema_name FROM information_schema.schemata WHERE schema_name = %s''', [schema_name]) return bool(cursor.fetchone()) cursor = connection.cursor() try: return _schema_exists(schema_name, cursor) finally: cursor.close() def get_active_schema_name(): """ Get the currently active schema. This requires a database query to ask it what the current `search_path` is. """ active_schema = getattr(_thread_locals, 'schema', None) if not active_schema: reported_schema = _get_search_path()[0] if _get_schema(reported_schema): active_schema = reported_schema else: active_schema = None _thread_locals.schema = active_schema return active_schema def get_active_schema(): """ Get the (internal) name of the currently active schema. """ return _get_schema(get_active_schema_name()) def get_active_schemata(): """ Get a (cached) list of all currently active schemata. """ schemata = cache.get('active-schemata') if schemata is None: schemata = get_schema_model().objects.active() cache.set('active-schemata', schemata) return schemata def _get_schema(schema_name): """ Get the matching active schema object for the given name, if it exists. """ for handler, response in find_schema.send(sender=None, schema=schema_name): if response: return response def activate_schema(schema_name): """ Activate the current schema: this will execute, in the database connection, something like: .. code:: sql SET search_path TO "foo",public; It sends signals before and after that the schema will be, and was activated. Must be passed a string: the internal name of the schema to activate. """ from .signals import schema_pre_activate, schema_post_activate if schema_name == settings.TEMPLATE_SCHEMA: raise TemplateSchemaActivation() schema_pre_activate.send(sender=None, schema_name=schema_name) _set_search_path(schema_name) found_schema = _get_search_path() if found_schema != schema_name: raise SchemaNotFound('Schema activation failed. 
Expected "{0}", saw "{1}"'.format( schema_name, found_schema, )) schema_post_activate.send(sender=None, schema_name=schema_name) _thread_locals.schema = schema_name def activate_template_schema(): """ Activate the template schema. You probably don't want to do this. Sometimes you do (like for instance to apply migrations). """ from .signals import schema_pre_activate, schema_post_activate _thread_locals.schema = None schema_name = settings.TEMPLATE_SCHEMA schema_pre_activate.send(sender=None, schema_name=schema_name) _set_search_path(schema_name) if _get_search_path() != schema_name: raise SchemaNotFound('Template schema was not activated. It seems "{0}" is active.'.format(_get_search_path())) schema_post_activate.send(sender=None, schema_name=schema_name) def get_template_schema(): return get_schema_model()(settings.TEMPLATE_SCHEMA) def deactivate_schema(schema=None): """ Deactivate the provided (or current) schema. """ from .signals import schema_pre_activate, schema_post_activate cursor = connection.cursor() schema_pre_activate.send(sender=None, schema_name=None) cursor.execute('SET search_path TO "$user",{0}'.format(settings.PUBLIC_SCHEMA)) schema_post_activate.send(sender=None, schema_name=None) _thread_locals.schema = None cursor.close() #: These models are required to be shared by the system. REQUIRED_SHARED_MODELS = [ 'auth.user', 'auth.permission', 'auth.group', 'boardinghouse.schema', 'sites.site', 'sessions.session', 'contenttypes.contenttype', 'admin.logentry', 'migrations.migration', # In the case these are not the default values. lazy(lambda: settings.BOARDINGHOUSE_SCHEMA_MODEL.lower())(), lazy(lambda: settings.AUTH_USER_MODEL.lower())(), ] REQUIRED_SHARED_TABLES = [ 'django_migrations', ] def _is_join_model(model): """ We define a join model to be one that has no fields that are not related fields (excluding the primary key), and that has more than one field. This may be a satisfactory definition, as a through model, which has non-related fields, must have been explicitly declared, and all automatic join models will have just (pk, from, to). """ return len(model._meta.fields) > 1 and all( (field.primary_key or field.remote_field) for field in model._meta.fields ) def is_shared_model(model): """ Is the model (or instance of a model) one that should be in the public/shared schema? """ if model._is_shared_model: return True app_model = '{m.app_label}.{m.model_name}'.format(m=model._meta).lower() # These should be case insensitive! if app_model in REQUIRED_SHARED_MODELS: return True if app_model in [x.lower() for x in settings.SHARED_MODELS]: return True # Sometimes, we want a join table to be private. if app_model in [x.lower() for x in settings.PRIVATE_MODELS]: return False # if all fields are auto or fk, then we are a join model, # and if all related objects are shared, then we must # also be shared, unless we were explicitly marked as private # above. if _is_join_model(model): return all( is_shared_model(field.remote_field.model) for field in model._meta.fields if field.remote_field ) return False def _get_models(apps, stack): """ If we are in a migration operation, we need to look in that for models. We really only should be injecting ourselves if we find a frame that contains a database_(forwards|backwards) function. Otherwise, we can look in the `apps` object passed in. 
""" for frame in stack: frame_locals = frame[0].f_locals if frame[3] == 'database_forwards' and all( local in frame_locals for local in ('from_state', 'to_state', 'schema_editor', 'self') ) and isinstance(frame_locals['self'], Operation): # Should this be from_state, or to_state, or should we look in both? from_state = frame_locals['from_state'] to_state = frame_locals['to_state'] models = set() if to_state.apps: models = models.union(to_state.apps.get_models()) if from_state.apps: models = models.union(from_state.apps.get_models()) return models return apps.get_models() def _get_join_model(table, table_map): """ Given a database table, and a mapping of tables to models, look for a many-to-many field on models that uses that database table. Currently, it only looks within models that have a matching prefix. """ for db_table, model in table_map.items(): if table.startswith(db_table): for field in model._meta.local_many_to_many: through = (field.remote_field if hasattr(field, 'remote_field') else field.rel).through if through._meta.db_table == table: return through def is_shared_table(table, apps=apps): """ Is the model from the provided database table name shared? We may need to look and see if we can work out which models this table joins. """ if table in REQUIRED_SHARED_TABLES: return True # Get a mapping of all table names to models. models = _get_models(apps, inspect.stack()) table_map = { x._meta.db_table: x for x in models if not x._meta.proxy } # If we have a match, see if that one is shared. if table in table_map: return is_shared_model(table_map[table]) # It may be a join table. through = _get_join_model(table, table_map) if through: return is_shared_model(through) # Not a join table: just assume that it's not shared. return False # Internal helper functions. def _table_exists(table_name, schema=None): cursor = connection.cursor() cursor.execute("""SELECT * FROM information_schema.tables WHERE table_name = %s AND table_schema = %s""", [table_name, schema or settings.PUBLIC_SCHEMA]) return bool(cursor.fetchone()) def _schema_table_exists(): return _table_exists(get_schema_model()._meta.db_table)<|fim▁end|>
    try:
        return apps.get_model(settings.BOARDINGHOUSE_SCHEMA_MODEL)
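
# Editor's sketch (not a dataset row): schema.py above switches multi-tenant
# PostgreSQL schemata by rewriting search_path. A minimal context manager
# built on the same two statements the row uses (activation via
# `SET search_path TO %s,public`, deactivation via `"$user",public`);
# a psycopg2-style cursor is assumed:
from contextlib import contextmanager

@contextmanager
def active_schema(cursor, schema_name, public="public"):
    cursor.execute("SET search_path TO %s,{0}".format(public), [schema_name])
    try:
        yield
    finally:
        cursor.execute('SET search_path TO "$user",{0}'.format(public))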
<|file_name|>set-bit.test.ts<|end_file_name|><|fim▁begin|>import setBit from './set-bit' test('sets bits', () => { expect(setBit(0b0, 7, 1)).toEqual(128) expect(setBit(0b11111111, 8, 1)).toEqual(511) expect(setBit(0b1000000000, 9, 0)).toEqual(0)<|fim▁hole|><|fim▁end|>
  expect(setBit(0, 31, 1)).toEqual(1 << 31)
})
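
# Editor's sketch (not a dataset row): a plain-Python equivalent of the setBit
# function exercised by the test above (the TypeScript implementation is not
# shown in the row, so this is an assumed reading). The expected values come
# straight from the test:
def set_bit(value, position, bit):
    if bit:
        return value | (1 << position)   # force the bit on
    return value & ~(1 << position)      # force the bit off

assert set_bit(0b0, 7, 1) == 128
assert set_bit(0b11111111, 8, 1) == 511
assert set_bit(0b1000000000, 9, 0) == 0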
<|file_name|>car.helper.js<|end_file_name|><|fim▁begin|>function CarHelper(car) { if (!(this instanceof arguments.callee)) { return new CarHelper(car); } var backtrack = []; car.helper = { getElement: function (x, y) { return car.table.children[y].children[x]; }, canMove: function (x, y) { return /[wrgby\*]/.test(this.getElement(x, y).textContent); }, gravity: function (x, y) { return this.canMove(x, y) ? {x: x, y: y} : undefined; }, left: function (x, y) { x -= 1; return x >= 0 ? this.gravity(x, y) : undefined; }, right: function (x, y) { x += 1; return car.mapData.N > x ? this.gravity(x, y) : undefined; }, up: function (x, y) { y -= 1; return y >= 0 ? this.gravity(x, y) : undefined; }, down: function (x, y) { y += 1; return car.mapData.N > y ? this.gravity(x, y) : undefined; }, random: function (min, max) { return Math.floor(Math.random() * (max - min)) + min; }, alreadyPassed: function (a, b) { //FIXME : check for all the cars var ways = car.way(); for (var i = 0, len = ways.length; i < len; i++) { if (ways[i] === a.toPosition()) { if (i > 0 && ways[i - 1] === b.toPosition()) return true; if ((i + 1) < len && ways[i + 1] === b.toPosition()) return true; } } return false; }, direction: function () { var where = [] , pos = car.position() , x = pos.x , y = pos.y; if (backtrack.length === car.way().length) { where = backtrack.pop(); } else { // LEFT var test = this.left(x, y); if (test && !this.alreadyPassed(pos, test)) { where.push(test); } // RIGHT test = this.right(x, y); if (test && !this.alreadyPassed(pos, test)) { where.push(test); } // UP test = this.up(x, y); if (test && !this.alreadyPassed(pos, test)) { where.push(test); } // DOWN test = this.down(x, y);<|fim▁hole|> if (test && !this.alreadyPassed(pos, test)) { where.push(test); } } if (where.length) { var direction = where.splice(this.random(0, where.length), 1); backtrack.push(where); return direction.pop(); } return undefined; }, moveTo: function (x, y) { var element = this.getElement(x, y) , pos = car.position(); car.path(x + ',' + y, element.innerHTML); element.innerHTML = 'w'.fontcolor(car.color()); car.way(x + ',' + y); this.update(); return true; }, backTo: function (x, y) { var element = this.getElement(x, y) , paths = car.path(); if (car.way().lastIndexOf(x + "," + y) === -1) { element.innerHTML = paths[x + ',' + y]; } this.update(); return true; }, toCoord: function (str) { var coord = str.split(',') , y = Number(coord.pop()) , x = Number(coord.pop()); return {x: x, y: y}; }, update: function () { var coord = this.toCoord(car.way().slice(-1).pop()); car.position(coord.x, coord.y); }, move: function () { var coord = this.direction(); if (coord) { return this.moveTo(coord.x, coord.y); } var ways = car.way(); if (ways.length) { var coord = this.toCoord(ways.pop()); return this.backTo(coord.x, coord.y); } return false; } }; return car; }<|fim▁end|>
<|file_name|>unused-struct-variant.rs<|end_file_name|><|fim▁begin|>#![deny(unused)]<|fim▁hole|>struct F; struct B; enum E { Foo(F), Bar(B), //~ ERROR variant is never constructed } fn main() { let _ = E::Foo(F); }<|fim▁end|>
<|file_name|>should-update-task.js<|end_file_name|><|fim▁begin|>import chai, { expect } from 'chai' import chaiHttp from 'chai-http' import server from '../src/index' import Query from '../src/query' import { url } from '../src/resources' chai.use(chaiHttp) export default (done) => { Query.create({title: 'Task', completed: true}) .then((task) => { task.title = 'Tarefa Alterada' chai.request(server.info.uri) .put(url) .send(task) .end((err, res) => { expect(res).to.have.status(200) expect(res.body).to.not.be.null<|fim▁hole|> done() }) }) .error((err) => { done(err) }) }<|fim▁end|>
expect(res.body).to.have.property('title') expect(res.body.title).to.equal('Tarefa Alterada')
<|file_name|>test_eve.py<|end_file_name|><|fim▁begin|>import mock from tests.compat import unittest from tests.utils import APITestCase import evelink.eve as evelink_eve class EVETestCase(APITestCase): def setUp(self): super(EVETestCase, self).setUp() self.eve = evelink_eve.EVE(api=self.api) def test_character_names_from_ids(self): self.api.get.return_value = self.make_api_result("eve/character_name.xml") result, current, expires = self.eve.character_names_from_ids(set([1,2])) self.assertEqual(result, {1:"EVE System", 2:"EVE Central Bank"}) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/CharacterName', params={'IDs': set([1,2])}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_character_name_from_id(self): self.api.get.return_value = self.make_api_result("eve/character_name_single.xml") result, current, expires = self.eve.character_name_from_id(1) self.assertEqual(result, "EVE System") self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/CharacterName', params={'IDs': [1]}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_character_ids_from_names(self): self.api.get.return_value = self.make_api_result("eve/character_id.xml") result, current, expires = self.eve.character_ids_from_names(set(["EVE System", "EVE Central Bank"])) self.assertEqual(result, {"EVE System":1, "EVE Central Bank":2}) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/CharacterID', params={'names': set(["EVE System","EVE Central Bank"])}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_character_id_from_name(self): self.api.get.return_value = self.make_api_result("eve/character_id_single.xml") result, current, expires = self.eve.character_id_from_name("EVE System") self.assertEqual(result, 1) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/CharacterID', params={'names': ["EVE System"]}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_affiliations_for_characters(self): self.api.get.return_value = self.make_api_result("eve/character_affiliation.xml") result, current, expires = self.eve.affiliations_for_characters(set([92168909, 401111892, 1979087900])) self.assertEqual(result, { 1979087900: { 'id': 1979087900, 'name': 'Marcel Devereux', 'faction': { 'id': 500004, 'name': 'Gallente Federation' }, 'corp': { 'id': 1894214152, 'name': 'Aideron Robotics' } }, 401111892: { 'id': 401111892, 'name': 'ShadowMaster', 'alliance': { 'id': 99000652, 'name': 'RvB - BLUE Republic' }, 'corp': { 'id': 1741770561, 'name': 'Blue Republic' } }, 92168909: { 'id': 92168909, 'name': 'CCP FoxFour', 'alliance': { 'id': 434243723, 'name': 'C C P Alliance' }, 'corp': { 'id': 109299958, 'name': 'C C P' } } }) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/CharacterAffiliation', params={'ids': set([92168909, 401111892, 1979087900])}) ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_affiliations_for_character(self): self.api.get.return_value = self.make_api_result("eve/character_affiliation_single.xml") result, current, expires = self.eve.affiliations_for_character(92168909) self.assertEqual(result, { 'id': 92168909, 'name': 'CCP FoxFour', 'alliance': { 'id': 434243723, 'name': 'C C P Alliance' }, 'corp': { 'id': 109299958, 'name': 'C C P' } }) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/CharacterAffiliation', params={'ids': [92168909]}) ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def 
test_character_info_from_id(self): self.api.get.return_value = self.make_api_result("eve/character_info.xml") result, current, expires = self.eve.character_info_from_id(1234) self.assertEqual(result, { 'alliance': {'id': None, 'name': None, 'timestamp': None}, 'bloodline': 'Civire', 'corp': {'id': 2345, 'name': 'Test Corporation', 'timestamp': 1338689400}, 'history': [ {'corp_id': 1, 'corp_name': 'test_one', 'start_ts': 1338603000}, {'corp_id': 2, 'corp_name': 'test_two', 'start_ts': 1318422896} ], 'id': 1234, 'isk': None, 'location': None, 'name': 'Test Character', 'race': 'Caldari', 'sec_status': 2.5, 'ship': {'name': None, 'type_id': None, 'type_name': None}, 'skillpoints': None, }) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/CharacterInfo', params={'characterID': 1234}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_alliances(self): self.api.get.return_value = self.make_api_result("eve/alliances.xml") result, current, expires = self.eve.alliances() self.assertEqual(result, { 1: { 'executor_id': 2, 'id': 1, 'member_corps': { 2: {'id': 2, 'timestamp': 1289250660}, 3: {'id': 3, 'timestamp': 1327728960}, 4: {'id': 4, 'timestamp': 1292440500}, }, 'member_count': 123, 'name': 'Test Alliance', 'ticker': 'TEST', 'timestamp': 1272717240, } }) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/AllianceList', params={}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_errors(self): self.api.get.return_value = self.make_api_result("eve/errors.xml") result, current, expires = self.eve.errors() self.assertEqual(result, {1:"Foo", 2:"Bar"}) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/ErrorList', params={}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_faction_warfare_stats(self): self.api.get.return_value = self.make_api_result("eve/faction_warfare_stats.xml") result, current, expires = self.eve.faction_warfare_stats() self.assertEqual(result, { 'kills': {'total': 232772, 'week': 3246, 'yesterday': 677}, 'points': {'total': 44045189, 'week': 414049, 'yesterday': 55087}, 'factions': { 500001: { 'id': 500001, 'kills': {'total': 59239, 'week': 627, 'yesterday': 115}, 'name': 'Caldari State', 'pilots': 5324, 'points': {'total': 4506493, 'week': 64548, 'yesterday': 9934}, 'systems': 61, }, 500002: { 'id': 500002, 'kills': {'total': 56736, 'week': 952, 'yesterday': 213}, 'name': 'Minmatar Republic', 'pilots': 4068, 'points': {'total': 3627522, 'week': 51211, 'yesterday': 2925}, 'systems': 0, }, 500003: { 'id': 500003, 'kills': {'total': 55717, 'week': 1000, 'yesterday': 225}, 'name': 'Amarr Empire', 'pilots': 3960, 'points': {'total': 3670190, 'week': 50518, 'yesterday': 3330}, 'systems': 11, }, 500004: { 'id': 500004, 'kills': {'total': 61080, 'week': 667, 'yesterday': 124}, 'name': 'Gallente Federation', 'pilots': 3663, 'points': {'total': 4098366, 'week': 62118, 'yesterday': 10343}, 'systems': 0, }, }, 'wars': [ { 'against': {'id': 500002, 'name': 'Minmatar Republic'}, 'faction': {'id': 500001, 'name': 'Caldari State'}, }, { 'against': {'id': 500004, 'name': 'Gallente Federation'}, 'faction': {'id': 500001, 'name': 'Caldari State'}, }, { 'against': {'id': 500001, 'name': 'Caldari State'}, 'faction': {'id': 500002, 'name': 'Minmatar Republic'}, }, { 'against': {'id': 500003, 'name': 'Amarr Empire'}, 'faction': {'id': 500002, 'name': 'Minmatar Republic'}, }, { 'against': {'id': 500002, 'name': 'Minmatar Republic'}, 'faction': {'id': 500003, 'name': 'Amarr Empire'}, }, { 
'against': {'id': 500004, 'name': 'Gallente Federation'}, 'faction': {'id': 500003, 'name': 'Amarr Empire'}, }, { 'against': {'id': 500001, 'name': 'Caldari State'}, 'faction': {'id': 500004, 'name': 'Gallente Federation'}, }, { 'against': {'id': 500003, 'name': 'Amarr Empire'}, 'faction': {'id': 500004, 'name': 'Gallente Federation'}, } ], }) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/FacWarStats', params={}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_faction_warfare_leaderboard(self): self.api.get.return_value = self.make_api_result("eve/faction_warfare_leaderboard.xml") result, current, expires = self.eve.faction_warfare_leaderboard() self.assertEqual(result, { 'char': { 'kills': { 'total': [{'id': 673662188, 'kills': 451, 'name': 'Val Erian'}], 'week': [{'id': 187452523, 'kills': 52, 'name': 'Tigrana Blanque'}], 'yesterday': [ {'id': 1007512845, 'kills': 14, 'name': 'StonedBoy'}, {'id': 646053002, 'kills': 11, 'name': 'Erick Voliffe'}, ],<|fim▁hole|> 'week': [{'id': 161929388, 'name': 'Ankhesentapemkah', 'points': 20851}], 'yesterday': [{'id': 774720050, 'name': 'v3nd3tt4', 'points': 3151}], }, }, 'corp': { 'kills': { 'total': [{'id': 673662188, 'kills': 451, 'name': 'Val Erian'}], 'week': [{'id': 187452523, 'kills': 52, 'name': 'Tigrana Blanque'}], 'yesterday': [ {'id': 1007512845, 'kills': 14, 'name': 'StonedBoy'}, {'id': 646053002, 'kills': 11, 'name': 'Erick Voliffe'}, ], }, 'points': { 'total': [{'id': 395923478, 'name': 'sasawong', 'points': 197046}], 'week': [{'id': 161929388, 'name': 'Ankhesentapemkah', 'points': 20851}], 'yesterday': [{'id': 774720050, 'name': 'v3nd3tt4', 'points': 3151}], }, }, 'faction': { 'kills': { 'total': [{'id': 500004, 'kills': 104, 'name': 'Gallente Federation'}], 'week': [{'id': 500004, 'kills': 105, 'name': 'Gallente Federation'}], 'yesterday': [{'id': 500004, 'kills': 106, 'name': 'Gallente Federation'}], }, 'points': { 'total': [{'id': 500004, 'points': 101, 'name': 'Gallente Federation'}], 'week': [{'id': 500004, 'points': 102, 'name': 'Gallente Federation'}], 'yesterday': [{'id': 500004, 'points': 103, 'name': 'Gallente Federation'}], }, }, }) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/FacWarTopStats', params={}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_conquerable_stations(self): self.api.get.return_value = self.make_api_result("eve/conquerable_stations.xml") result, current, expires = self.eve.conquerable_stations() self.assertEqual(result, { 1:{ 'id':1, 'name':"Station station station", 'type_id':123, 'system_id':512, 'corp':{ 'id':444, 'name':"Valkyries of Night" } }, 2:{ 'id':2, 'name':"Station the station", 'type_id':42, 'system_id':503, 'corp':{ 'id':400, 'name':"Deus Fides Empire"} } }) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/ConquerableStationlist', params={}), ]) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) def test_skill_tree(self): self.api.get.return_value = self.make_api_result("eve/skill_tree.xml") result, current, expires = self.eve.skill_tree() self.assertEqual(result, { 255: { 'id': 255, 'name': 'Gunnery', 'skills': { 3300: { 'attributes': { 'primary': 'perception', 'secondary': 'willpower', }, 'bonuses': { 'turretSpeeBonus': { 'type': 'turretSpeeBonus', 'value': -2.0, }, }, 'description': "Basic turret operation skill. 
2% Bonus to weapon turrets' rate of fire per skill level.", 'group_id': 255, 'id': 3300, 'name': 'Gunnery', 'published': True, 'rank': 1, 'required_skills': {}, }, 3301: { 'attributes': { 'primary': 'perception', 'secondary': 'willpower', }, 'bonuses': { 'damageMultiplierBonus': { 'type': 'damageMultiplierBonus', 'value': 5.0, }, }, 'description': 'Operation of small hybrid turrets. 5% Bonus to small hybrid turret damage per level.', 'group_id': 255, 'id': 3301, 'name': 'Small Hybrid Turret', 'published': True, 'rank': 1, 'required_skills': { 3300: { 'id': 3300, 'level': 1, 'name': 'Gunnery', }, }, }, }, }, 266: { 'id': 266, 'name': 'Corporation Management', 'skills': { 11584 : { 'id': 11584, 'group_id': 266, 'name': 'Anchoring', 'description': 'Skill at Anchoring Deployables. Can not be trained on Trial Accounts.', 'published': True, 'rank': 3, 'attributes': { 'primary': 'memory', 'secondary': 'charisma', }, 'required_skills': {}, 'bonuses': { 'canNotBeTrainedOnTrial': { 'type': 'canNotBeTrainedOnTrial', 'value': 1.0, } } }, 3369 : { 'id': 3369, 'group_id': 266, 'name': 'CFO Training', 'description': 'Skill at managing corp finances. 5% discount on all fees at non-hostile NPC station if acting as CFO of a corp. ', 'published': False, 'rank': 3, 'attributes': { 'primary': 'memory', 'secondary': 'charisma', }, 'required_skills': { 3363 : { 'id' : 3363, 'level' : 2, 'name' : None }, 3444 : { 'id' : 3444, 'level' : 3, 'name' : None }, }, 'bonuses': {} } } } }) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/SkillTree', params={}) ]) def test_reference_types(self): self.api.get.return_value = self.make_api_result("eve/reference_types.xml") result, current, expires = self.eve.reference_types() self.assertEqual(result, { 0: 'Undefined', 1: 'Player Trading', 2: 'Market Transaction', 3: 'GM Cash Transfer', 4: 'ATM Withdraw', 5: 'ATM Deposit' }) self.assertEqual(current, 12345) self.assertEqual(expires, 67890) self.assertEqual(self.api.mock_calls, [ mock.call.get('eve/RefTypes', params={}) ]) if __name__ == "__main__": unittest.main()<|fim▁end|>
}, 'points': { 'total': [{'id': 395923478, 'name': 'sasawong', 'points': 197046}],
<|file_name|>Action.py<|end_file_name|><|fim▁begin|>class Action: label = "" selectable = 0 def __init__ (self,label="",s=0): self.label = label self.selectable = s def getLabel (self): return self.label def do (self):<|fim▁hole|> def canSelect (self): return self.selectable<|fim▁end|>
tmp = 1
<|file_name|>Reference.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package com.piron1991.builder_tools.reference; public class Reference { public static final String MOD_ID = "builder_tools"; public static final String VERSION = "0.1"; public static final String MOD_NAME = "Builder tools"; public static final String CPROXY = "com.piron1991.builder_tools.proxy.clientProxy"; public static final String SPROXY = "com.piron1991.builder_tools.proxy.serverProxy"; public static final String SIZE_CATEGORY="Ranges of block placings for tools: "; public static final String RECIPE_CATEGORY="Recipes for tools: "; }<|fim▁end|>
<|file_name|>run_julia.py<|end_file_name|><|fim▁begin|>import sys import time from mpi4py.futures import MPICommExecutor x0 = -2.0 x1 = +2.0 y0 = -1.5 y1 = +1.5 w = 1600 h = 1200 dx = (x1 - x0) / w dy = (y1 - y0) / h def julia(x, y): c = complex(0, 0.65) z = complex(x, y) n = 255 while abs(z) < 3 and n > 1: z = z**2 + c n -= 1 return n def julia_line(k): line = bytearray(w) y = y1 - k * dy for j in range(w): x = x0 + j * dx line[j] = julia(x, y) return line def plot(image): import warnings<|fim▁hole|> try: from matplotlib import pyplot as plt except ImportError: return plt.figure() plt.imshow(image, aspect='equal', cmap='cubehelix') plt.axis('off') try: plt.draw() plt.pause(2) except: pass def test_julia(): with MPICommExecutor() as executor: if executor is None: return # worker process tic = time.time() image = list(executor.map(julia_line, range(h), chunksize=10)) toc = time.time() print("%s Set %dx%d in %.2f seconds." % ('Julia', w, h, toc-tic)) if len(sys.argv) > 1 and sys.argv[1] == '-plot': plot(image) if __name__ == '__main__': test_julia()<|fim▁end|>
warnings.simplefilter('ignore', UserWarning)
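
# Editor's sketch (not a dataset row): run_julia.py above renders the fractal
# one scanline at a time, farming lines out through mpi4py's MPICommExecutor.
# The escape-time kernel it parallelises, lifted out so it runs standalone
# (constants copied from the row):
def julia(x, y, c=complex(0, 0.65), limit=255):
    z = complex(x, y)
    n = limit
    while abs(z) < 3 and n > 1:
        z = z ** 2 + c
        n -= 1
    return n

print(julia(0.0, 0.0))  # points inside the set exhaust the whole budget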
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/*jslint browser: true */ /*global require, app: true, $, Backbone, window, document */ require([ 'router' ], function () { <|fim▁hole|><|fim▁end|>
    'use strict';
});
<|file_name|>OrthomclToolbox.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2 # -*- coding: utf-8 -*- # # Copyright 2012 Unknown <diogo@arch> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. try: from process.sequence import Alignment from base.plotter import bar_plot, multi_bar_plot from process.error_handling import KillByUser except ImportError: from trifusion.process.sequence import Alignment from trifusion.base.plotter import bar_plot, multi_bar_plot from trifusion.process.error_handling import KillByUser from collections import OrderedDict, Counter import pickle import os import sqlite3 from os.path import join import random import string import copy class Cluster(object): """ Object for clusters of the OrthoMCL groups file. It is useful to set a number of attributes that will make subsequent filtration and processing much easier """ def __init__(self, line_string): """ To initialize a Cluster object, only a string compliant with the format of a cluster in an OrthoMCL groups file has to be provided. This line should contain the name of the group, a colon, and the sequences belonging to that group separated by whitespace :param line_string: String of a cluster """ # Initializing attributes for parse_string self.name = None self.sequences = None self.species_frequency = {} # Initializing attributes for apply filter # If the value is different than None, this will inform downstream # objects of whether this cluster is compliant with the specified # gene_threshold self.gene_compliant = None # If the value is different than None, this will inform downstream # objects of whether this cluster is compliant with the specified # species_threshold self.species_compliant = None self.parse_string(line_string) def parse_string(self, cluster_string): """ Parses the string and sets the group name and sequence list attributes """ fields = cluster_string.split(":") # Setting the name and sequence list of the clusters self.name = fields[0].strip() self.sequences = fields[1].strip().split() # Setting the gene frequency for each species in the cluster self.species_frequency = Counter([field.split("|")[0] for field in self.sequences]) def remove_taxa(self, taxa_list): """ Removes the taxa contained in taxa_list from self.sequences and self.species_frequency :param taxa_list: list, each element should be a taxon name """ self.sequences = [x for x in self.sequences if x.split("|")[0] not in taxa_list] self.species_frequency = dict((taxon, val) for taxon, val in self.species_frequency.items() if taxon not in taxa_list) def apply_filter(self, gene_threshold, species_threshold): """ This method will update two Cluster attributes, self.gene_flag and self.species_flag, which will inform downstream objects if this cluster respects the gene and species threshold :param gene_threshold: Integer for the maximum number of gene copies per species :param 
species_threshold: Integer for the minimum number of species present """ # Check whether cluster is compliant with species_threshold if len(self.species_frequency) >= species_threshold and \ species_threshold: self.species_compliant = True else: self.species_compliant = False # Check whether cluster is compliant with gene_threshold if max(self.species_frequency.values()) <= gene_threshold and \ gene_threshold: self.gene_compliant = True else: self.gene_compliant = False class OrthoGroupException(Exception): pass class GroupLight(object): """ Analogous to Group object but with several changes to reduce memory usage """ def __init__(self, groups_file, gene_threshold=None, species_threshold=None, ns=None): self.gene_threshold = gene_threshold if gene_threshold else None self.species_threshold = species_threshold if species_threshold \ else None # Attribute containing the list of included species self.species_list = [] # Attribute that will contain taxa to be excluded from analyses self.excluded_taxa = [] self.species_frequency = [] # Attributes that will store the number (int) of cluster after gene and # species filter self.all_clusters = 0 self.num_gene_compliant = 0 self.num_species_compliant = 0 self.all_compliant = 0 # Attribute containing the total number of sequences self.total_seqs = 0 # Attribute containing the maximum number of extra copies found in the # clusters self.max_extra_copy = 0 # Attribute with name of the group file, which will be an ID self.name = os.path.abspath(groups_file) self.table = groups_file.split(os.sep)[-1].split(".")[0] # Initialize atribute containing the groups filtered using the gene and # species threshold. This attribute can be updated at any time using # the update_filtered_group method self.filtered_groups = [] self._parse_groups(ns) if type(self.species_threshold) is float: self._get_sp_proportion() def groups(self): """ Generator for group file. This replaces the self.groups attribute of the original Group Object. Instead of loading the whole file into memory, a generator is created to iterate over its contents. It may run a bit slower but its a lot more memory efficient. :return: """ file_handle = open(self.name) for line in file_handle: if line.strip() != "": yield line.strip() def iter_species_frequency(self): """ In order to prevent permanent changes to the species_frequency attribute due to the filtering of taxa, this iterable should be used instead of the said variable. This creates a temporary deepcopy of species_frequency which will be iterated over and eventually modified. """ # Since the items of species_frequency are mutable, this ensures # that even those objects are correctly cloned sp_freq = copy.deepcopy(self.species_frequency) for cl in sp_freq: yield cl def _remove_tx(self, line): """ Given a group line, remove all references to the excluded taxa :param line: raw group file line """ new_line = "{}:".format(line.split(":")[0]) tx_str = "\t".join([x for x in line.split(":")[1].split() if x.split("|")[0] not in self.excluded_taxa]) return new_line + tx_str def _apply_filter(self, cl): """ Sets or updates the basic group statistics, such as the number of orthologs compliant with the gene copy and minimum taxa filters. :param cl: dictionary. Contains the number of occurrences for each taxon present in the ortholog cluster (e.g. {"taxonA": 2, "taxonB": 1). 
""" # First, remove excluded taxa from the cl object since this will # impact all other filters for tx in self.excluded_taxa: cl.pop(tx, None) if cl: self.all_clusters += 1 extra_copies = max(cl.values()) if extra_copies > self.max_extra_copy: self.max_extra_copy = extra_copies if extra_copies <= self.gene_threshold and self.gene_threshold and\ len(cl) >= self.species_threshold and \ self.species_threshold: self.num_gene_compliant += 1 self.num_species_compliant += 1 self.all_compliant += 1 elif (extra_copies <= self.gene_threshold and self.gene_threshold) or self.gene_threshold == 0: self.num_gene_compliant += 1 elif len(cl) >= self.species_threshold and \ self.species_threshold: self.num_species_compliant += 1 def _get_compliance(self, cl): """ Determines whether an ortholog cluster is compliant with the specified ortholog filters. :param ccl: dictionary. Contains the number of occurrences for each taxon present in the ortholog cluster (e.g. {"taxonA": 2, "taxonB": 1). :return: tuple. The first element refers to the gene copy filter while the second refers to the minimum taxa filter. Values of 1 indicate that the ortholg cluster is compliant. """ for tx in self.excluded_taxa: cl.pop(tx, None) if cl: cp = max(cl.values()) if not self.gene_threshold and not self.species_threshold: return 1, 1 if cp <= self.gene_threshold and self.gene_threshold and\ len(cl) >= self.species_threshold and \ self.species_threshold: return 1, 1 elif (cp <= self.gene_threshold and self.gene_threshold) or \ not self.gene_threshold: return 1, 0 elif (len(cl) >= self.species_threshold and self.species_threshold) or not self.species_threshold: return 0, 1 else: return 0, 0 def _reset_counter(self): self.all_clusters = 0 self.num_gene_compliant = 0 self.num_species_compliant = 0 self.all_compliant = 0 def _parse_groups(self, ns=None): for cl in self.groups(): if ns: if ns.stop: raise KillByUser("") # Retrieve the field containing the ortholog sequences sequence_field = cl.split(":")[1] # Update species frequency list sp_freq = Counter((x.split("|")[0] for x in sequence_field.split())) self.species_frequency.append(sp_freq) # Update number of sequences self.total_seqs += len(sequence_field) # Update max number of extra copies extra_copies = max(sp_freq.values()) if extra_copies > self.max_extra_copy: self.max_extra_copy = max(sp_freq.values()) self.species_list.extend([x for x in sp_freq if x not in self.species_list]) # Apply filters, if any # gene filter if self.species_threshold and self.gene_threshold: self._apply_filter(sp_freq) def exclude_taxa(self, taxa_list, update_stats=False): """ Updates the excluded_taxa attribute and updates group statistics if update_stats is True. This does not change the Group object data permanently, only sets an attribute that will be taken into account when plotting and exporting data. :param taxa_list: list. List of taxa that should be excluded from downstream operations :param update_stats: boolean. 
If True, it will update the group statistics """ # IF the taxa_list is the same as the excluded_taxa attribute, # there is nothing to do if sorted(taxa_list) == sorted(self.excluded_taxa): return self.species_list = [x for x in self.species_list + self.excluded_taxa if x not in taxa_list] self.excluded_taxa = taxa_list if update_stats: self._reset_counter() for cl in self.iter_species_frequency(): self._apply_filter(cl) def basic_group_statistics(self, update_stats=True): if update_stats: self._reset_counter() for cl in self.iter_species_frequency(): self._apply_filter(cl) return len(self.species_frequency), self.total_seqs, \ self.num_gene_compliant, self.num_species_compliant, \ self.all_compliant def _get_sp_proportion(self): """ When the species filter is a float value between 0 and 1, convert this proportion into absolute values (rounded up), since filters were already designed for absolutes. """ self.species_threshold = int(self.species_threshold * len(self.species_list)) def update_filters(self, gn_filter, sp_filter, update_stats=False): """ Updates the group filter attributes and group summary stats if update_stats is True. This method does not change the data of the Group object, only sets attributes that will be taken into account when plotting or exporting data :param gn_filter: integer. Maximum number of gene copies allowed in an ortholog cluster :param sp_filter: integer/float. Minimum number/proportion of taxa representation :param update_stats: boolean. If True it will update the group summary statistics """ # If the provided filters are the same as the current group attributes # there is nothing to do if (gn_filter, sp_filter) == (self.gene_threshold, self.species_threshold): return self.gene_threshold = gn_filter self.species_threshold = sp_filter if type(self.species_threshold) is float: self._get_sp_proportion() if update_stats: self._reset_counter() for cl in self.iter_species_frequency(): self._apply_filter(cl) def retrieve_sequences(self, sqldb, protein_db, dest="./", shared_namespace=None, outfile=None): """ :param sqldb: srting. Path to sqlite database file :param protein_db: string. Path to protein database file :param dest: string. Directory where sequences will be exported :param shared_namespace: Namespace object to communicate with TriFusion's main process :param outfile: If set, all sequeces will be instead saved in a single output file. This is used for the nucleotide sequence export :return: """ if not os.path.exists(dest) and not outfile: os.makedirs(dest) if not os.path.exists(join(dest, "header_correspondance")): os.makedirs(join(dest, "header_correspondance")) if shared_namespace: shared_namespace.act = shared_namespace.msg = "Creating database" # Stores sequences that could not be retrieved shared_namespace.missed = shared_namespace.counter = 0 shared_namespace.progress = 0 # Get number of lines of protein database p = 0 with open(protein_db) as fh: for p, _ in enumerate(fh): pass shared_namespace.max_pb = shared_namespace.total = p + 1 # Connect to database con = sqlite3.connect(sqldb) c = con.cursor() table_name = "".join([x for x in protein_db if x.isalnum()]).encode( "utf8") # Create table if it does not exist if not c.execute("SELECT name FROM sqlite_master WHERE type='table' " "AND name='{}'".format(table_name)).fetchall(): c.execute("CREATE TABLE [{}] (seq_id text PRIMARY KEY, seq text)". 
format(table_name)) # Populate database with open(protein_db) as ph: seq = "" for line in ph: # Kill switch if shared_namespace: if shared_namespace.stop: con.close() raise KillByUser("") shared_namespace.progress += 1 shared_namespace.counter += 1 if line.startswith(">"): if seq != "": c.execute("INSERT INTO [{}] VALUES (?, ?)". format(table_name), (seq_id, seq)) seq_id = line.strip()[1:] seq = "" else: seq += line.strip() con.commit() if shared_namespace: shared_namespace.act = shared_namespace.msg = "Fetching sequences" shared_namespace.good = shared_namespace.counter = 0 shared_namespace.progress = 0 shared_namespace.max_pb = shared_namespace.total = \ self.all_compliant # Set single output file, if option is set if outfile: output_handle = open(join(dest, outfile), "w") # Fetching sequences for line, cl in zip(self.groups(), self.iter_species_frequency()): # Kill switch if shared_namespace: if shared_namespace.stop: con.close() raise KillByUser("") # Filter sequences if self._get_compliance(cl) == (1, 1): if shared_namespace: shared_namespace.good += 1 shared_namespace.progress += 1 shared_namespace.counter += 1 # Retrieve sequences from current cluster if self.excluded_taxa: line = self._remove_tx(line) fields = line.split(":") # Open file if not outfile: cl_name = fields[0] oname = join(dest, cl_name) mname = join(dest, "header_correspondance", cl_name) output_handle = open(oname + ".fas", "w") map_handle = open(mname + "_headerMap.csv", "w") seqs = fields[-1].split() for i in seqs: # Query database c.execute("SELECT * FROM [{}] WHERE seq_id = ?". format(table_name), (i,)) vals = c.fetchone() # Handles cases where the sequence could not be retrieved # If outfile is set, output_handle will be a single file # for all groups. If not, it will represent an individual # group file try: if not outfile: tx_name = vals[0].split("|")[0] output_handle.write(">{}\n{}\n".format(tx_name, vals[1])) map_handle.write("{}; {}\n".format(vals[0], tx_name)) else: output_handle.write(">{}\n{}\n".format(vals[0], vals[1])) except TypeError: pass if not outfile: output_handle.close() if outfile: output_handle.close() con.close() def export_filtered_group(self, output_file_name="filtered_groups", dest="./", shared_namespace=None): if shared_namespace: shared_namespace.act = "Exporting filtered orthologs" shared_namespace.missed = 0 shared_namespace.good = 0 output_handle = open(os.path.join(dest, output_file_name), "w") for p, (line, cl) in enumerate(zip(self.groups(), self.iter_species_frequency())): if shared_namespace: if shared_namespace.stop: raise KillByUser("") if shared_namespace: shared_namespace.progress = p if self._get_compliance(cl) == (1, 1): if shared_namespace: shared_namespace.good += 1 if self.excluded_taxa: l = self._remove_tx(line) else: l = line output_handle.write("{}\n".format(l)) output_handle.close() def bar_species_distribution(self, filt=False): if filt: data = Counter((len(cl) for cl in self.iter_species_frequency() if self._get_compliance(cl) == (1, 1))) else: data = Counter((len(cl) for cl in self.species_frequency)) x_labels = [x for x in list(data)] data = list(data.values()) # When data is empty, return an exception if not data: return {"data": None} # Sort lists x_labels = [list(x) for x in zip(*sorted(zip(x_labels, data)))][0] # Convert label to strings x_labels = [str(x) for x in x_labels] title = "Taxa frequency distribution" ax_names = ["Number of taxa", "Ortholog frequency"] return {"data": [data], "title": title, "ax_names": ax_names, "labels": x_labels, 
"table_header": ["Number of species", "Ortholog frequency"]} def bar_genecopy_distribution(self, filt=False): """ Creates a bar plot with the distribution of gene copies across clusters :param filt: Boolean, whether or not to use the filtered groups. """ if filt: data = Counter((max(cl.values()) for cl in self.iter_species_frequency() if self._get_compliance(cl) == (1, 1))) else: data = Counter((max(cl.values()) for cl in self.species_frequency if cl)) x_labels = [x for x in list(data)] data = list(data.values()) # When data is empty, return an exception if not data: return {"data": None} x_labels, data = (list(x) for x in zip(*sorted(zip(x_labels, data)))) # Convert label to strings x_labels = [str(x) for x in x_labels] title = "Gene copy distribution" ax_names = ["Number of gene copies", "Ortholog frequency"] return {"data": [data], "labels": x_labels, "title": title, "ax_names": ax_names, "table_header": ["Number of gene copies", "Ortholog frequency"]} def bar_species_coverage(self, filt=False): """ Creates a stacked bar plot with the proportion of :return: """ data = Counter(dict((x, 0) for x in self.species_list)) self._reset_counter() for cl in self.iter_species_frequency(): self._apply_filter(cl) if filt: data += Counter(dict((x, 1) for x, y in cl.items() if y > 0 and self._get_compliance(cl) == (1, 1))) else: data += Counter(dict((x, 1) for x, y in cl.items() if y > 0)) data = data.most_common() # When data is empty, return an exception if not data: return {"data": None} x_labels = [str(x[0]) for x in data] data = [[x[1] for x in data], [self.all_clusters - x[1] if not filt else self.all_compliant - x[1] for x in data]] lgd_list = ["Available data", "Missing data"] ax_names = [None, "Ortholog frequency"] return {"data": data, "labels": x_labels, "lgd_list": lgd_list, "ax_names": ax_names} def bar_genecopy_per_species(self, filt=False): data = Counter(dict((x, 0) for x in self.species_list)) self._reset_counter() for cl in self.iter_species_frequency(): self._apply_filter(cl) if filt: data += Counter(dict((x, y) for x, y in cl.items() if y > 1 and self._get_compliance(cl) == (1, 1))) else: data += Counter(dict((x, y) for x, y in cl.items() if y > 1)) data = data.most_common() # When data is empty, return an exception if not data: return {"data": None} x_labels = [str(x[0]) for x in data] data = [[x[1] for x in data]] ax_names = [None, "Gene copies"] return {"data": data, "labels": x_labels, "ax_names": ax_names} class Group(object): """ This represents the main object of the orthomcl toolbox module. It is initialized with a file name of a orthomcl groups file and provides several methods that act on that group file. To process multiple Group objects, see MultiGroups object """ def __init__(self, groups_file, gene_threshold=None, species_threshold=None, project_prefix="MyGroups"): # Initializing thresholds. 
These may be set from the start, or using # some method that uses them as arguments self.gene_threshold = gene_threshold self.species_threshold = species_threshold # Attribute containing the list of included species self.species_list = [] # Attribute that will contain taxa to be excluded from analyses self.excluded_taxa = [] # Attributes that will store the number (int) of cluster after gene and # species filter self.all_compliant = 0 self.num_gene_compliant = 0 self.num_species_compliant = 0 # Attribute containing the total number of sequences self.total_seqs = 0 # Attribute containing the maximum number of extra copies found in the # clusters self.max_extra_copy = 0 # Attribute with name of the group file, which will be an ID self.group_name = groups_file # Initialize the project prefix for possible output files self.prefix = project_prefix # Initialize attribute containing the original groups self.groups = [] # Initialize atribute containing the groups filtered using the gene and # species threshold. This attribute can be updated at any time using # the update_filtered_group method self.filtered_groups = [] self.name = None # Parse groups file and populate groups attribute self.__parse_groups(groups_file) def __parse_groups(self, groups_file): """ Parses the ortholog clusters in the groups file and populates the self.groups list with Cluster objects for each line in the groups file. :param groups_file: File name for the orthomcl groups file :return: populates the groups attribute """ self.name = groups_file self.species_list = [] groups_file_handle = open(groups_file) for line in groups_file_handle: cluster_object = Cluster(line) # Add cluster to general group list self.groups.append(cluster_object) # Update total sequence counter self.total_seqs += len(cluster_object.sequences) # Update maximum number of extra copies, if needed if max(cluster_object.species_frequency.values()) > \ self.max_extra_copy: self.max_extra_copy = \ max(cluster_object.species_frequency.values()) # Update species_list attribute self.species_list = list(set(self.species_list).union( set(cluster_object.species_frequency.keys()))) # If thresholds have been specified, update self.filtered_groups # attribute if self.species_threshold and self.gene_threshold: cluster_object.apply_filter(self.gene_threshold, self.species_threshold) if cluster_object.species_compliant and \ cluster_object.gene_compliant: # Add cluster to the filtered group list self.filtered_groups.append(cluster_object) self.all_compliant += 1 # Update num_species_compliant attribute if cluster_object.species_compliant: self.num_species_compliant += 1 # Update num_gene_compliant attribute if cluster_object.gene_compliant: self.num_gene_compliant += 1 def exclude_taxa(self, taxa_list): """ Adds a taxon_name to the excluded_taxa list and updates the filtered_groups list """ self.excluded_taxa.extend(taxa_list) # Storage variable for new filtered groups filtered_groups = [] # Reset max_extra_copy attribute<|fim▁hole|> if cl.iter_sequences and cl.species_frequency: filtered_groups.append(cl) # Update maximum number of extra copies, if needed if max(cl.species_frequency.values()) > self.max_extra_copy: self.max_extra_copy = max(cl.species_frequency.values()) # Update species_list self.species_list = sorted(list(set(self.species_list) - set(taxa_list))) self.filtered_groups = self.groups = filtered_groups def get_filters(self): """ Returns a tuple with the thresholds for max gene copies and min species """ return self.gene_threshold, self.species_threshold 
def basic_group_statistics(self): """ This method creates a basic table in list format containing basic information of the groups file (total number of clusters, total number of sequences, number of clusters below the gene threshold, number of clusters below the species threshold and number of clusters below the gene AND species threshold) :return: List containing number of [total clusters, total sequences, clusters above gene threshold, clusters above species threshold, clusters above gene and species threshold] """ # Total number of clusters total_cluster_num = len(self.groups) # Total number of sequenes total_sequence_num = self.total_seqs # Gene compliant clusters clusters_gene_threshold = self.num_gene_compliant # Species compliant clusters clusters_species_threshold = self.num_species_compliant clusters_all_threshold = len(self.filtered_groups) statistics = [total_cluster_num, total_sequence_num, clusters_gene_threshold, clusters_species_threshold, clusters_all_threshold] return statistics def paralog_per_species_statistic(self, output_file_name= "Paralog_per_species.csv", filt=True): """ This method creates a CSV table with information on the number of paralog clusters per species :param output_file_name: string. Name of the output csv file :param filt: Boolean. Whether to use the filtered groups (True) or total groups (False) """ # Setting which clusters to use if filt: groups = self.filtered_groups else: groups = self.groups paralog_count = dict((species, 0) for species in self.species_list) for cluster in groups: for species in paralog_count: if cluster.species_frequency[species] > 1: paralog_count[species] += 1 # Writing table output_handle = open(output_file_name, "w") output_handle.write("Species; Clusters with paralogs\n") for species, val in paralog_count.items(): output_handle.write("%s; %s\n" % (species, val)) output_handle.close() def export_filtered_group(self, output_file_name="filtered_groups", dest="./", get_stats=False, shared_namespace=None): """ Export the filtered groups into a new file. :param output_file_name: string, name of the filtered groups file :param dest: string, path to directory where the filtered groups file will be created :param get_stats: Boolean, whether to return the basic count stats or not :param shared_namespace: Namespace object, for communicating with main process. """ if self.filtered_groups: if shared_namespace: shared_namespace.act = "Exporting filtered orthologs" output_handle = open(os.path.join(dest, output_file_name), "w") if get_stats: all_orthologs = len(self.groups) sp_compliant = 0 gene_compliant = 0 final_orthologs = 0 for cluster in self.filtered_groups: if shared_namespace: shared_namespace.progress = \ self.filtered_groups.index(cluster) if cluster.species_compliant and cluster.gene_compliant: output_handle.write("%s: %s\n" % ( cluster.name, " ".join(cluster.iter_sequences))) if get_stats: final_orthologs += 1 if get_stats: if cluster.species_compliant: sp_compliant += 1 if cluster.gene_compliant: gene_compliant += 1 output_handle.close() if get_stats: return all_orthologs, sp_compliant, gene_compliant,\ final_orthologs else: raise OrthoGroupException("The groups object must be filtered " "before using the export_filtered_group" "method") def update_filters(self, gn_filter, sp_filter): """ Sets new values for the self.species_threshold and self.gene_threshold and updates the filtered_group :param gn_filter: int. Maximum value for gene copies in cluster :param sp_filter: int. 
Minimum value for species in cluster """ self.species_threshold = int(sp_filter) self.gene_threshold = int(gn_filter) self.update_filtered_group() def update_filtered_group(self): """ This method creates a new filtered group variable, like export_filtered_group, but instead of writing into a new file, it replaces the self.filtered_groups variable """ self.filtered_groups = [] # Reset gene and species compliant counters self.num_gene_compliant = 0 self.num_species_compliant = 0 for cluster in self.groups: cluster.apply_filter(self.gene_threshold, self.species_threshold) if cluster.species_compliant and cluster.gene_compliant: self.filtered_groups.append(cluster) # Update num_species_compliant attribute if cluster.species_compliant: self.num_species_compliant += 1 # Update num_gene_compliant attribute if cluster.gene_compliant: self.num_gene_compliant += 1 def retrieve_sequences(self, database, dest="./", mode="fasta", filt=True, shared_namespace=None): """ When provided with a database in Fasta format, this will use the Alignment object to retrieve sequences :param database: String. Fasta file :param dest: directory where files will be save :param mode: string, whether to retrieve sequences to a file ('fasta'), or a dictionary ('dict') :param filt: Boolean. Whether to use the filtered groups (True) or total groups (False) :param shared_namespace: Namespace object. This argument is meant for when fast are retrieved in a background process, where there is a need to update the main process of the changes in this method :param dest: string. Path to directory where the retrieved sequences will be created. """ if mode == "dict": seq_storage = {} if filt: groups = self.filtered_groups else: groups = self.groups if not os.path.exists("Orthologs"): os.makedirs("Orthologs") # Update method progress if shared_namespace: shared_namespace.act = "Creating database" shared_namespace.progress = 0 print("Creating db") # Check what type of database was provided #TODO: Add exception handling if file is not parsed with Aligment if isinstance(database, str): try: db_aln = pickle.load(open(database, "rb")) except (EnvironmentError, pickle.UnpicklingError): db_aln = Alignment(database) db_aln = db_aln.alignment elif isinstance(database, dict): db_aln = database else: raise OrthoGroupException("The input database is neither a string" "nor a dictionary object") print("Retrieving seqs") # Update method progress if shared_namespace: shared_namespace.act = "Retrieving sequences" for cluster in groups: if shared_namespace: shared_namespace.progress += 1 if mode == "dict": seq_storage[cluster.name] = [] output_handle = open(join(dest, cluster.name + ".fas"), "w") for sequence_id in cluster.iter_sequences: seq = db_aln[sequence_id] if mode == "fasta": output_handle.write(">%s\n%s\n" % (sequence_id, seq)) elif mode == "dict": seq_storage[cluster.name].append([sequence_id.split("|")[0], seq]) output_handle.close() if mode == "dict": return seq_storage def bar_species_distribution(self, dest="./", filt=False, ns=None, output_file_name="Species_distribution"): """ Creates a bar plot with the distribution of species numbers across clusters :param dest: string, destination directory :param filt: Boolean, whether or not to use the filtered groups. 
:param output_file_name: string, name of the output file """ data = [] # Determine which groups to use if filt: groups = self.filtered_groups else: groups = self.groups for i in groups: if ns: if ns.stop: raise KillByUser("") data.append(len([x for x, y in i.species_frequency.items() if y > 0])) # Transform data into histogram-like transform_data = Counter(data) x_labels = [x for x in list(transform_data)] y_vals = list(transform_data.values()) # Sort lists x_labels, y_vals = (list(x) for x in zip(*sorted(zip(x_labels, y_vals)))) # Convert label to strings x_labels = [str(x) for x in x_labels] if ns: if ns.stop: raise KillByUser("") # Create plot b_plt, lgd, _ = bar_plot([y_vals], x_labels, title="Taxa frequency distribution", ax_names=["Number of taxa", "Ortholog frequency"]) b_plt.savefig(os.path.join(dest, output_file_name), bbox_inches="tight", dpi=400) # Create table table_list = [["Number of species", "Ortholog frequency"]] for x, y in zip(x_labels, y_vals): table_list.append([x, y]) return b_plt, lgd, table_list def bar_genecopy_distribution(self, dest="./", filt=False, output_file_name="Gene_copy_distribution.png"): """ Creates a bar plot with the distribution of gene copies across clusters :param dest: string, destination directory :param filt: Boolean, whether or not to use the filtered groups. :param output_file_name: string, name of the output file """ data = [] # Determin which groups to use if filt: groups = self.filtered_groups else: groups = self.groups for cl in groups: # Get max number of copies max_copies = max(cl.species_frequency.values()) data.append(max_copies) # Transform data into histogram-like transform_data = Counter(data) x_labels = [x for x in list(transform_data)] y_vals = list(transform_data.values()) # Sort lists x_labels, y_vals = (list(x) for x in zip(*sorted(zip(x_labels, y_vals)))) # Convert label to strings x_labels = [str(x) for x in x_labels] # Create plot b_plt, lgd, _ = bar_plot([y_vals], x_labels, title="Gene copy distribution", ax_names=["Number of gene copies", "Ortholog frequency"], reverse_x=False) b_plt.savefig(os.path.join(dest, output_file_name), bbox_inches="tight", figsize=(8 * len(x_labels) / 4, 6), dpi=200) # Create table table_list = [["Number of gene copies", "Ortholog frequency"]] for x, y in zip(x_labels, y_vals): table_list.append([x, y]) return b_plt, lgd, table_list def bar_species_coverage(self, dest="./", filt=False, ns=None, output_file_name="Species_coverage"): """ Creates a stacked bar plot with the proportion of :return: """ # Determine which groups to use if filt: groups = self.filtered_groups else: groups = self.groups data = Counter(dict((x, 0) for x in self.species_list)) for cl in groups: if ns: if ns.stop: raise KillByUser("") data += Counter(dict((x, 1) for x, y in cl.species_frequency.items() if y > 0)) xlabels = [str(x) for x in list(data.keys())] data = [list(data.values()), [len(groups) - x for x in data.values()]] lgd_list = ["Available data", "Missing data"] if ns: if ns.stop: raise KillByUser("") b_plt, lgd, _ = bar_plot(data, xlabels, lgd_list=lgd_list, ax_names=[None, "Ortholog frequency"]) b_plt.savefig(os.path.join(dest, output_file_name), bbox_inches="tight", dpi=200) return b_plt, lgd, "" class MultiGroups(object): """ Creates an object composed of multiple Group objects """ def __init__(self, groups_files=None, gene_threshold=None, species_threshold=None, project_prefix="MyGroups"): """ :param groups_files: A list containing the file names of the multiple group files :return: Populates the 
self.multiple_groups attribute """ # If a MultiGroups is initialized with duplicate Group objects, these # will be stored in a list. If all Group objects are unique, the list # will remain empty self.duplicate_groups = [] # Initializing thresholds. These may be set from the start, or using # some method that uses them as arguments self.gene_threshold = gene_threshold self.species_threshold = species_threshold self.prefix = project_prefix self.multiple_groups = {} self.filters = {} if groups_files: for group_file in groups_files: # If group_file is already a Group object, just add it if not isinstance(group_file, Group): # Check for duplicate group files group_object = Group(group_file, self.gene_threshold, self.species_threshold) else: group_object = group_file if group_object.name in self.multiple_groups: self.duplicate_groups.append(group_object.name) else: self.multiple_groups[group_object.name] = group_object self.filters[group_object.name] = (1, len(group_object.species_list)) def __iter__(self): return iter(self.multiple_groups) def iter_gnames(self): return (x.name for x in self.multiple_groups) def get_gnames(self): return [x.name for x in self.multiple_groups] def add_group(self, group_obj): """ Adds a group object :param group_obj: Group object """ # Check for duplicate groups if group_obj.name in self.multiple_groups: self.duplicate_groups.append(group_obj.name) else: self.multiple_groups[group_obj.name] = group_obj def remove_group(self, group_id): """ Removes a group object according to its name :param group_id: string, name matching a Group object name attribute """ if group_id in self.multiple_groups: del self.multiple_groups[group_id] def get_group(self, group_id): """ Returns a group object based on its name. If the name does not match any group object, returns None :param group_id: string. Name of group object """ try: return self.multiple_groups[group_id] except KeyError: return def add_multigroups(self, multigroup_obj): """ Merges a MultiGroup object :param multigroup_obj: MultiGroup object """ for group_obj in multigroup_obj: self.add_group(group_obj) def update_filters(self, gn_filter, sp_filter, group_names=None, default=False): """ This will not change the Group object themselves, only the filter mapping. 
The filter is only applied when the Group object is retrieved to reduce computations :param gn_filter: int, filter for max gene copies :param sp_filter: int, filter for min species :param group_names: list, with names of group objects """ if group_names: for group_name in group_names: # Get group object group_obj = self.multiple_groups[group_name] # Define filters gn_filter = gn_filter if not default else 1 sp_filter = sp_filter if not default else \ len(group_obj.species_list) # Update Group object with new filters group_obj.update_filters(gn_filter, sp_filter) # Update filter map self.filters[group_name] = (gn_filter, sp_filter) for group_name, group_obj in self.multiple_groups.items(): # Define filters gn_filter = gn_filter if not default else 1 sp_filter = sp_filter if not default else \ len(group_obj.species_list) # Update Group object with new filters group_obj.update_filters(gn_filter, sp_filter) # Update filter map self.filters[group_name] = (gn_filter, sp_filter) def basic_multigroup_statistics(self, output_file_name= "multigroup_base_statistics.csv"): """ :param output_file_name: :return: """ # Creates the storage for the statistics of the several files statistics_storage = OrderedDict() for group in self.multiple_groups: group_statistics = group.basic_group_statistics() statistics_storage[group.name] = group_statistics output_handle = open(self.prefix + "." + output_file_name, "w") output_handle.write("Group file; Total clusters; Total sequences; " "Clusters below gene threshold; Clusters above " "species threshold; Clusters below gene and above" " species thresholds\n") for group, vals in statistics_storage.items(): output_handle.write("%s; %s\n" % (group, ";".join([str(x) for x in vals]))) output_handle.close() def bar_orthologs(self, output_file_name="Final_orthologs", dest="./", stats="total"): """ Creates a bar plot with the final ortholog values for each group file :param output_file_name: string. Name of output file :param dest: string. output directory :param stats: string. The statistics that should be used to generate the bar plot. 
Options are: ..: "1": Total orthologs ..: "2": Species compliant orthologs ..: "3": Gene compliant orthologs ..: "4": Final orthologs ..: "all": All of the above Multiple combinations can be provided, for instance: "123" will display bars for total, species compliant and gene compliant stats """ # Stores the x-axis labels x_labels = [] # Stores final ortholog values for all 4 possible data sets vals = [[], [], [], []] lgd = ["Total orthologs", "After species filter", "After gene filter", "Final orthologs"] # Get final ortholog values for g_obj in self.multiple_groups: x_labels.append(g_obj.name.split(os.sep)[-1]) # Populate total orthologs if "1" in stats or stats == "all": vals[0].append(len(g_obj.groups)) # Populate species compliant orthologs if "2" in stats or stats == "all": vals[1].append(g_obj.num_species_compliant) # Populate gene compliant orthologs if "3" in stats or stats == "all": vals[2].append(g_obj.num_gene_compliant) # Populate final orthologs if "4" in stats or stats == "all": vals[3].append(len(g_obj.filtered_groups)) # Filter valid data sets lgd_list = [x for x in lgd if vals[lgd.index(x)]] vals = [l for l in vals if l] # Create plot b_plt, lgd = multi_bar_plot(vals, x_labels, lgd_list=lgd_list) b_plt.savefig(os.path.join(dest, output_file_name), bbox_extra_artists=(lgd,), bbox_inches="tight") # Create table list object table_list = [] # Create header table_list.append([""] + x_labels) # Create content for i in range(len(vals)): table_list += [x for x in [[lgd_list[i]] + vals[i]]] return b_plt, lgd, table_list def group_overlap(self): """ This will find the overlap of orthologs between two group files. THIS METHOD IS TEMPORARY AND EXPERIMENTAL """ def parse_groups(group_obj): """ Returns a list with the sorted ortholog clusters """ storage = [] for cluster in group_obj.groups: storage.append(set(cluster.iter_sequences)) return storage if len(self.multiple_groups) != 2: raise SystemExit("This method can only be used with two group " "files") group1 = self.multiple_groups[0] group2 = self.multiple_groups[1] group1_list = parse_groups(group1) group2_list = parse_groups(group2) counter = 0 for i in group1_list: if i in group2_list: counter += 1 class MultiGroupsLight(object): """ Creates an object composed of multiple Group objects like MultiGroups. However, instead of storing the groups in memory, these are shelved in the disk """ # The report calls available calls = ['bar_genecopy_distribution', 'bar_species_distribution', 'bar_species_coverage', 'bar_genecopy_per_species'] def __init__(self, db_path, groups=None, gene_threshold=None, species_threshold=None, project_prefix="MyGroups", ns=None): """ :param groups: A list containing the file names of the multiple group files :return: Populates the self.multiple_groups attribute """ self.db_path = db_path # If a MultiGroups is initialized with duplicate Group objects, their # names will be stored in a list. If all Group objects are unique, the # list will remain empty self.duplicate_groups = [] self.groups = {} self.groups_stats = {} # Attribute that will store the paths of badly formated group files self.bad_groups = [] # Initializing thresholds. These may be set from the start, or using # some method that uses them as arguments self.gene_threshold = gene_threshold self.species_threshold = species_threshold # Initializing mapping of group filters to their names. 
Should be # something like {"groupA": (1, 10)} self.filters = {} self.taxa_list = {} self.excluded_taxa = {} # This attribute will contain a dictionary with the maximum extra copies # for each group object self.max_extra_copy = {} # This attribute will contain a list with the number of species for # each group object, excluding replicates. If a MultiGroupLight object # contains Group objects with different taxa numbers, this attribute # can be used to issue a warning self.species_number = [] self.prefix = project_prefix if ns: ns.files = len(groups) if groups: for group_file in groups: # If group_file is already a Group object, just add it if not isinstance(group_file, GroupLight): try: if ns: if ns.stop: raise KillByUser("") ns.counter += 1 group_object = GroupLight(group_file, self.gene_threshold, self.species_threshold, ns=ns) except Exception as e: print(e.message) self.bad_groups.append(group_file) continue else: group_object = group_file # Check for duplicate group files if group_object.name in self.groups: self.duplicate_groups.append(group_file.name) else: self.add_group(group_object) def __iter__(self): for k, val in self.groups.items(): yield k, pickle.load(open(val, "rb")) def clear_groups(self): """ Clears the current MultiGroupsLight object """ for f in self.groups.values(): os.remove(f) self.duplicate_groups = [] self.groups = {} self.groups_stats = {} self.filters = {} self.max_extra_copy = {} self.species_number = [] self.gene_threshold = self.species_threshold = 0 def add_group(self, group_obj): """ Adds a group object :param group_obj: Group object """ # Check for duplicate groups if group_obj.name not in self.groups: gpath = os.path.join(self.db_path, "".join(random.choice(string.ascii_uppercase) for _ in range(15))) pickle.dump(group_obj, open(gpath, "wb")) self.groups[group_obj.name] = gpath self.filters[group_obj.name] = (1, len(group_obj.species_list), []) self.max_extra_copy[group_obj.name] = group_obj.max_extra_copy if len(group_obj.species_list) not in self.species_number: self.species_number.append(len(group_obj.species_list)) else: self.duplicate_groups.append(group_obj.name) def remove_group(self, group_id): """ Removes a group object according to its name :param group_id: string, name matching a Group object name attribute """ if group_id in self.groups: os.remove(self.groups[group_id]) del self.groups[group_id] def get_group(self, group_id): """ Returns a group object based on its name. If the name does not match any group object, returns None :param group_id: string. Name of group object """ try: return pickle.load(open(self.groups[unicode(group_id)], "rb")) except KeyError: return def add_multigroups(self, multigroup_obj): """ Merges a MultiGroup object :param multigroup_obj: MultiGroup object """ for _, group_obj in multigroup_obj: self.add_group(group_obj) def update_filters(self, gn_filter, sp_filter, excluded_taxa, group_names=None, default=False): """ This will not change the Group object themselves, only the filter mapping. 
The filter is only applied when the Group object is retrieved to reduce computations :param gn_filter: int, filter for max gene copies :param sp_filter: int, filter for min species :param group_names: list, with names of group objects """ # There are no groups to update if group_names == []: return if group_names: glist = group_names else: glist = self.groups for group_name in glist: # Get group object group_obj = pickle.load(open(self.groups[group_name], "rb")) # Define excluded taxa group_obj.exclude_taxa(excluded_taxa, True) # Define filters gn_filter = gn_filter if not default else 1 sp_filter = sp_filter if not default else \ len(group_obj.species_list) # Correct maximum filter values after excluding taxa gn_filter = gn_filter if gn_filter <= group_obj.max_extra_copy \ else group_obj.max_extra_copy sp_filter = sp_filter if sp_filter <= len(group_obj.species_list) \ else len(group_obj.species_list) # Update Group object with new filters group_obj.update_filters(gn_filter, sp_filter) # Update group stats self.get_multigroup_statistics(group_obj) pickle.dump(group_obj, open(self.groups[group_name], "wb")) # Update filter map self.filters[group_name] = (gn_filter, group_obj.species_threshold) self.taxa_list[group_name] = group_obj.species_list self.excluded_taxa[group_name] = group_obj.excluded_taxa def get_multigroup_statistics(self, group_obj): """ :return: """ stats = group_obj.basic_group_statistics() self.groups_stats[group_obj.name] = {"stats": stats, "species": group_obj.species_list, "max_copies": group_obj.max_extra_copy} def bar_orthologs(self, group_names=None, output_file_name="Final_orthologs", dest="./", stats="all"): """ Creates a bar plot with the final ortholog values for each group file :param group_names: list. If None, all groups in self.group_stats will be used to generate the plot. Else, only the groups with the names in the list will be plotted. :param output_file_name: string. Name of output file :param dest: string. output directory :param stats: string. The statistics that should be used to generate the bar plot. 
Options are: ..: "1": Total orthologs ..: "2": Species compliant orthologs ..: "3": Gene compliant orthologs ..: "4": Final orthologs ..: "all": All of the above Multiple combinations can be provided, for instance: "123" will display bars for total, species compliant and gene compliant stats """ # Stores the x-axis labels x_labels = [] # Stores final ortholog values for all 4 possible data sets vals = [[], [], [], []] lgd = ["Total orthologs", "After species filter", "After gene filter", "Final orthologs"] # Determine which groups will be plotted if group_names: groups_lst = group_names else: groups_lst = self.groups_stats.keys() for gname in groups_lst: gstats = self.groups_stats[gname] x_labels.append(gname.split(os.sep)[-1]) # Populate total orthologs if "1" in stats or stats == "all": vals[0].append(gstats["stats"][0]) # Populate species compliant orthologs if "2" in stats or stats == "all": vals[1].append(gstats["stats"][3]) # Populate gene compliant orthologs if "3" in stats or stats == "all": vals[2].append(gstats["stats"][2]) # Populate final orthologs if "4" in stats or stats == "all": vals[3].append(gstats["stats"][4]) # Filter valid data sets lgd_list = [x for x in lgd if vals[lgd.index(x)]] vals = [l for l in vals if l] # Create plot b_plt, lgd = multi_bar_plot(vals, x_labels, lgd_list=lgd_list) b_plt.savefig(os.path.join(dest, output_file_name), bbox_extra_artists=(lgd,), bbox_inches="tight", dpi=200) # Create table list object table_list = [] # Create header table_list.append([""] + x_labels) # Create content for i in range(len(vals)): table_list += [x for x in [[lgd_list[i]] + vals[i]]] return b_plt, lgd, table_list __author__ = "Diogo N. Silva"<|fim▁end|>
self.max_extra_copy = 0 for cl in self.groups: cl.remove_taxa(taxa_list)
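A usage sketch for the filtering API in the row above (hypothetical, not part of the dataset row): it assumes the GroupLight constructor takes the groups file path, as the Group class shown here does; the file name, thresholds and taxon name are all invented.

# Hedged usage sketch for the GroupLight filtering API shown above.
# Assumptions: GroupLight("groups.txt") parses an OrthoMCL-style groups
# file; "groups.txt" and "Taxon_A" are placeholder names.
g = GroupLight("groups.txt")

# Keep clusters with at most 2 gene copies per taxon and at least 4 taxa,
# refreshing the summary counters in the same pass over the clusters.
g.update_filters(gn_filter=2, sp_filter=4, update_stats=True)
print(g.num_gene_compliant, g.num_species_compliant, g.all_compliant)

# Exclude one taxon and rebuild the statistics before plotting.
g.exclude_taxa(["Taxon_A"], update_stats=True)
plot_data = g.bar_species_distribution(filt=True)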
<|file_name|>forwarder.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # pylint: disable=W0212 import fcntl import inspect import logging import os import psutil import textwrap from devil import base_error from devil import devil_env from devil.android import device_errors from devil.android.constants import file_system from devil.android.sdk import adb_wrapper from devil.android.valgrind_tools import base_tool from devil.utils import cmd_helper logger = logging.getLogger(__name__) # If passed as the device port, this will tell the forwarder to allocate # a dynamic port on the device. The actual port can then be retrieved with # Forwarder.DevicePortForHostPort. DYNAMIC_DEVICE_PORT = 0 def _GetProcessStartTime(pid): p = psutil.Process(pid) if inspect.ismethod(p.create_time): return p.create_time() else: # Process.create_time is a property in old versions of psutil. return p.create_time def _DumpHostLog(): # The host forwarder daemon logs to /tmp/host_forwarder_log, so print the end # of that. try: with open('/tmp/host_forwarder_log') as host_forwarder_log: logger.info('Last 50 lines of the host forwarder daemon log:') for line in host_forwarder_log.read().splitlines()[-50:]: logger.info(' %s', line) except Exception: # pylint: disable=broad-except # Grabbing the host forwarder log is best-effort. Ignore all errors. logger.warning('Failed to get the contents of host_forwarder_log.') def _LogMapFailureDiagnostics(device): _DumpHostLog() # The device forwarder daemon logs to the logcat, so print the end of that. try: logger.info('Last 50 lines of logcat:') for logcat_line in device.adb.Logcat(dump=True)[-50:]: logger.info(' %s', logcat_line) except (device_errors.CommandFailedError, device_errors.DeviceUnreachableError): # Grabbing the device forwarder log is also best-effort. Ignore all errors. logger.warning('Failed to get the contents of the logcat.') # Log alive device forwarders. try: ps_out = device.RunShellCommand(['ps'], check_return=True) logger.info('Currently running device_forwarders:') for line in ps_out: if 'device_forwarder' in line: logger.info(' %s', line) except (device_errors.CommandFailedError, device_errors.DeviceUnreachableError): logger.warning('Failed to list currently running device_forwarder ' 'instances.') class _FileLock(object): """With statement-aware implementation of a file lock. File locks are needed for cross-process synchronization when the multiprocessing Python module is used. 
""" def __init__(self, path): self._fd = -1 self._path = path def __enter__(self): self._fd = os.open(self._path, os.O_RDONLY | os.O_CREAT) if self._fd < 0: raise Exception('Could not open file %s for reading' % self._path) fcntl.flock(self._fd, fcntl.LOCK_EX) def __exit__(self, _exception_type, _exception_value, traceback): fcntl.flock(self._fd, fcntl.LOCK_UN) os.close(self._fd) class HostForwarderError(base_error.BaseError): """Exception for failures involving host_forwarder.""" def __init__(self, message): super(HostForwarderError, self).__init__(message) class Forwarder(object): """Thread-safe class to manage port forwards from the device to the host.""" _DEVICE_FORWARDER_FOLDER = (file_system.TEST_EXECUTABLE_DIR + '/forwarder/') _DEVICE_FORWARDER_PATH = ( file_system.TEST_EXECUTABLE_DIR + '/forwarder/device_forwarder') _LOCK_PATH = '/tmp/chrome.forwarder.lock' # Defined in host_forwarder_main.cc _HOST_FORWARDER_LOG = '/tmp/host_forwarder_log' _TIMEOUT = 60 # seconds _instance = None @staticmethod def Map(port_pairs, device, tool=None): """Runs the forwarder. Args: port_pairs: A list of tuples (device_port, host_port) to forward. Note that you can specify 0 as a device_port, in which case a port will by dynamically assigned on the device. You can get the number of the assigned port using the DevicePortForHostPort method. device: A DeviceUtils instance. tool: Tool class to use to get wrapper, if necessary, for executing the forwarder (see valgrind_tools.py). Raises: Exception on failure to forward the port. """ if not tool: tool = base_tool.BaseTool() with _FileLock(Forwarder._LOCK_PATH): instance = Forwarder._GetInstanceLocked(tool) instance._InitDeviceLocked(device, tool) device_serial = str(device) map_arg_lists = [[ '--adb=' + adb_wrapper.AdbWrapper.GetAdbPath(), '--serial-id=' + device_serial, '--map', str(device_port), str(host_port) ] for device_port, host_port in port_pairs] logger.info('Forwarding using commands: %s', map_arg_lists) for map_arg_list in map_arg_lists: try: map_cmd = [instance._host_forwarder_path] + map_arg_list (exit_code, output) = cmd_helper.GetCmdStatusAndOutputWithTimeout( map_cmd, Forwarder._TIMEOUT) except cmd_helper.TimeoutError as e: raise HostForwarderError( '`%s` timed out:\n%s' % (' '.join(map_cmd), e.output)) except OSError as e: if e.errno == 2: raise HostForwarderError('Unable to start host forwarder. ' 'Make sure you have built host_forwarder.') else: raise if exit_code != 0: try: instance._KillDeviceLocked(device, tool) except (device_errors.CommandFailedError, device_errors.DeviceUnreachableError): # We don't want the failure to kill the device forwarder to # supersede the original failure to map. 
logger.warning( 'Failed to kill the device forwarder after map failure: %s', str(e)) _LogMapFailureDiagnostics(device) formatted_output = ('\n'.join(output) if isinstance(output, list) else output) raise HostForwarderError( '`%s` exited with %d:\n%s' % (' '.join(map_cmd), exit_code, formatted_output)) tokens = output.split(':') if len(tokens) != 2: raise HostForwarderError('Unexpected host forwarder output "%s", ' 'expected "device_port:host_port"' % output) device_port = int(tokens[0]) host_port = int(tokens[1]) serial_with_port = (device_serial, device_port) instance._device_to_host_port_map[serial_with_port] = host_port instance._host_to_device_port_map[host_port] = serial_with_port logger.info('Forwarding device port: %d to host port: %d.', device_port, host_port) @staticmethod def UnmapDevicePort(device_port, device): """Unmaps a previously forwarded device port. Args: device: A DeviceUtils instance. device_port: A previously forwarded port (through Map()). """ with _FileLock(Forwarder._LOCK_PATH): Forwarder._UnmapDevicePortLocked(device_port, device) @staticmethod def UnmapAllDevicePorts(device): """Unmaps all the previously forwarded ports for the provided device. Args: device: A DeviceUtils instance. port_pairs: A list of tuples (device_port, host_port) to unmap. """ with _FileLock(Forwarder._LOCK_PATH): instance = Forwarder._GetInstanceLocked(None) unmap_all_cmd = [ instance._host_forwarder_path, '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(), '--serial-id=%s' % device.serial, '--unmap-all' ] try: exit_code, output = cmd_helper.GetCmdStatusAndOutputWithTimeout( unmap_all_cmd, Forwarder._TIMEOUT) except cmd_helper.TimeoutError as e: raise HostForwarderError( '`%s` timed out:\n%s' % (' '.join(unmap_all_cmd), e.output)) if exit_code != 0: error_msg = [ '`%s` exited with %d' % (' '.join(unmap_all_cmd), exit_code) ] if isinstance(output, list): error_msg += output else: error_msg += [output] raise HostForwarderError('\n'.join(error_msg)) # Clean out any entries from the device & host map. device_map = instance._device_to_host_port_map host_map = instance._host_to_device_port_map for device_serial_and_port, host_port in device_map.items(): device_serial = device_serial_and_port[0] if device_serial == device.serial: del device_map[device_serial_and_port] del host_map[host_port] # Kill the device forwarder. tool = base_tool.BaseTool() instance._KillDeviceLocked(device, tool) @staticmethod def DevicePortForHostPort(host_port): """Returns the device port that corresponds to a given host port.""" with _FileLock(Forwarder._LOCK_PATH): serial_and_port = Forwarder._GetInstanceLocked( None)._host_to_device_port_map.get(host_port) return serial_and_port[1] if serial_and_port else None @staticmethod def RemoveHostLog(): if os.path.exists(Forwarder._HOST_FORWARDER_LOG): os.unlink(Forwarder._HOST_FORWARDER_LOG) @staticmethod def GetHostLog(): if not os.path.exists(Forwarder._HOST_FORWARDER_LOG): return '' with file(Forwarder._HOST_FORWARDER_LOG, 'r') as f: return f.read() @staticmethod def _GetInstanceLocked(tool): """Returns the singleton instance. Note that the global lock must be acquired before calling this method. Args: tool: Tool class to use to get wrapper, if necessary, for executing the forwarder (see valgrind_tools.py). """ if not Forwarder._instance: Forwarder._instance = Forwarder(tool) return Forwarder._instance def __init__(self, tool): """Constructs a new instance of Forwarder. Note that Forwarder is a singleton therefore this constructor should be called only once. 
Args: tool: Tool class to use to get wrapper, if necessary, for executing the forwarder (see valgrind_tools.py). """ assert not Forwarder._instance self._tool = tool self._initialized_devices = set() self._device_to_host_port_map = dict() self._host_to_device_port_map = dict() self._host_forwarder_path = devil_env.config.FetchPath('forwarder_host') assert os.path.exists(self._host_forwarder_path), 'Please build forwarder2' self._InitHostLocked() @staticmethod def _UnmapDevicePortLocked(device_port, device): """Internal method used by UnmapDevicePort(). Note that the global lock must be acquired before calling this method. """ instance = Forwarder._GetInstanceLocked(None) serial = str(device) serial_with_port = (serial, device_port) if serial_with_port not in instance._device_to_host_port_map: logger.error('Trying to unmap non-forwarded port %d', device_port) return host_port = instance._device_to_host_port_map[serial_with_port] del instance._device_to_host_port_map[serial_with_port] del instance._host_to_device_port_map[host_port] unmap_cmd = [ instance._host_forwarder_path, '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(), '--serial-id=%s' % serial, '--unmap', str(device_port) ] try: (exit_code, output) = cmd_helper.GetCmdStatusAndOutputWithTimeout( unmap_cmd, Forwarder._TIMEOUT) except cmd_helper.TimeoutError as e: raise HostForwarderError( '`%s` timed out:\n%s' % (' '.join(unmap_cmd), e.output)) if exit_code != 0: logger.error('`%s` exited with %d:\n%s', ' '.join(unmap_cmd), exit_code, '\n'.join(output) if isinstance(output, list) else output) @staticmethod def _GetPidForLock(): """Returns the PID used for host_forwarder initialization. The PID of the "sharder" is used to handle multiprocessing. The "sharder" is the initial process that forks that is the parent process. """ return os.getpgrp() def _InitHostLocked(self): """Initializes the host forwarder daemon. Note that the global lock must be acquired before calling this method. This method kills any existing host_forwarder process that could be stale. """ # See if the host_forwarder daemon was already initialized by a concurrent # process or thread (in case multi-process sharding is not used). # TODO(crbug.com/762005): Consider using a different implemention; relying # on matching the string represantion of the process start time seems # fragile. pid_for_lock = Forwarder._GetPidForLock() fd = os.open(Forwarder._LOCK_PATH, os.O_RDWR | os.O_CREAT) with os.fdopen(fd, 'r+') as pid_file: pid_with_start_time = pid_file.readline() if pid_with_start_time: (pid, process_start_time) = pid_with_start_time.split(':') if pid == str(pid_for_lock): if process_start_time == str(_GetProcessStartTime(pid_for_lock)): return self._KillHostLocked() pid_file.seek(0) pid_file.write( '%s:%s' % (pid_for_lock, str(_GetProcessStartTime(pid_for_lock)))) pid_file.truncate()<|fim▁hole|> def _InitDeviceLocked(self, device, tool): """Initializes the device_forwarder daemon for a specific device (once). Note that the global lock must be acquired before calling this method. This method kills any existing device_forwarder daemon on the device that could be stale, pushes the latest version of the daemon (to the device) and starts it. Args: device: A DeviceUtils instance. tool: Tool class to use to get wrapper, if necessary, for executing the forwarder (see valgrind_tools.py). 
""" device_serial = str(device) if device_serial in self._initialized_devices: return try: self._KillDeviceLocked(device, tool) except device_errors.CommandFailedError: logger.warning('Failed to kill device forwarder. Rebooting.') device.Reboot() forwarder_device_path_on_host = devil_env.config.FetchPath( 'forwarder_device', device=device) forwarder_device_path_on_device = ( Forwarder._DEVICE_FORWARDER_FOLDER if os.path.isdir(forwarder_device_path_on_host) else Forwarder._DEVICE_FORWARDER_PATH) device.PushChangedFiles([(forwarder_device_path_on_host, forwarder_device_path_on_device)]) cmd = [Forwarder._DEVICE_FORWARDER_PATH] wrapper = tool.GetUtilWrapper() if wrapper: cmd.insert(0, wrapper) device.RunShellCommand( cmd, env={'LD_LIBRARY_PATH': Forwarder._DEVICE_FORWARDER_FOLDER}, check_return=True) self._initialized_devices.add(device_serial) @staticmethod def KillHost(): """Kills the forwarder process running on the host.""" with _FileLock(Forwarder._LOCK_PATH): Forwarder._GetInstanceLocked(None)._KillHostLocked() def _KillHostLocked(self): """Kills the forwarder process running on the host. Note that the global lock must be acquired before calling this method. """ logger.info('Killing host_forwarder.') try: kill_cmd = [self._host_forwarder_path, '--kill-server'] (exit_code, output) = cmd_helper.GetCmdStatusAndOutputWithTimeout( kill_cmd, Forwarder._TIMEOUT) if exit_code != 0: logger.warning('Forwarder unable to shut down:\n%s', output) kill_cmd = ['pkill', '-9', 'host_forwarder'] (exit_code, output) = cmd_helper.GetCmdStatusAndOutputWithTimeout( kill_cmd, Forwarder._TIMEOUT) if exit_code in (0, 1): # pkill exits with a 0 if it was able to signal at least one process. # pkill exits with a 1 if it wasn't able to singal a process because # no matching process existed. We're ok with either. return _, ps_output = cmd_helper.GetCmdStatusAndOutputWithTimeout( ['ps', 'aux'], Forwarder._TIMEOUT) host_forwarder_lines = [line for line in ps_output.splitlines() if 'host_forwarder' in line] if host_forwarder_lines: logger.error('Remaining host_forwarder processes:\n %s', '\n '.join(host_forwarder_lines)) else: logger.error('No remaining host_forwarder processes?') _DumpHostLog() error_msg = textwrap.dedent("""\ `{kill_cmd}` failed to kill host_forwarder. exit_code: {exit_code} output: {output} """) raise HostForwarderError( error_msg.format( kill_cmd=' '.join(kill_cmd), exit_code=str(exit_code), output='\n'.join(' %s' % l for l in output.splitlines()))) except cmd_helper.TimeoutError as e: raise HostForwarderError( '`%s` timed out:\n%s' % (' '.join(kill_cmd), e.output)) @staticmethod def KillDevice(device, tool=None): """Kills the forwarder process running on the device. Args: device: Instance of DeviceUtils for talking to the device. tool: Wrapper tool (e.g. valgrind) that can be used to execute the device forwarder (see valgrind_tools.py). """ with _FileLock(Forwarder._LOCK_PATH): Forwarder._GetInstanceLocked(None)._KillDeviceLocked( device, tool or base_tool.BaseTool()) def _KillDeviceLocked(self, device, tool): """Kills the forwarder process running on the device. Note that the global lock must be acquired before calling this method. Args: device: Instance of DeviceUtils for talking to the device. tool: Wrapper tool (e.g. valgrind) that can be used to execute the device forwarder (see valgrind_tools.py). 
""" logger.info('Killing device_forwarder.') self._initialized_devices.discard(device.serial) if not device.FileExists(Forwarder._DEVICE_FORWARDER_PATH): return cmd = [Forwarder._DEVICE_FORWARDER_PATH, '--kill-server'] wrapper = tool.GetUtilWrapper() if wrapper: cmd.insert(0, wrapper) device.RunShellCommand( cmd, env={'LD_LIBRARY_PATH': Forwarder._DEVICE_FORWARDER_FOLDER}, check_return=True)<|fim▁end|>
<|file_name|>registry.go<|end_file_name|><|fim▁begin|>// Package registry is an interface for service discovery package registry import ( "errors" ) // The registry provides an interface for service discovery // and an abstraction over varying implementations // {consul, etcd, zookeeper, ...} type Registry interface { Register(*Service, ...RegisterOption) error Deregister(*Service) error GetService(string) ([]*Service, error) ListServices() ([]*Service, error) Watch(...WatchOption) (Watcher, error) String() string Options() Options } type Option func(*Options) type RegisterOption func(*RegisterOptions) type WatchOption func(*WatchOptions) var ( DefaultRegistry = newConsulRegistry() ErrNotFound = errors.New("not found") ) func NewRegistry(opts ...Option) Registry { return newConsulRegistry(opts...) } // Register a service node. Additionally supply options such as TTL. func Register(s *Service, opts ...RegisterOption) error { return DefaultRegistry.Register(s, opts...) } // Deregister a service node<|fim▁hole|>} // Retrieve a service. A slice is returned since we separate Name/Version. func GetService(name string) ([]*Service, error) { return DefaultRegistry.GetService(name) } // List the services. Only returns service names func ListServices() ([]*Service, error) { return DefaultRegistry.ListServices() } // Watch returns a watcher which allows you to track updates to the registry. func Watch(opts ...WatchOption) (Watcher, error) { return DefaultRegistry.Watch(opts...) } func String() string { return DefaultRegistry.String() }<|fim▁end|>
func Deregister(s *Service) error { return DefaultRegistry.Deregister(s)
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>// Karma configuration file, see link for more information // https://karma-runner.github.io/1.0/config/configuration-file.html module.exports = function (config) { config.set({ basePath: '', frameworks: ['jasmine', '@angular-devkit/build-angular'], plugins: [ require('karma-jasmine'),<|fim▁hole|> require('@angular-devkit/build-angular/plugins/karma') ], client: { clearContext: false // leave Jasmine Spec Runner output visible in browser }, coverageIstanbulReporter: { dir: require('path').join(__dirname, 'coverage'), reports: [ 'html', 'lcovonly' ], fixWebpackSourcePaths: true }, angularCli: { environment: 'dev' }, reporters: ['progress', 'kjhtml'], port: 9876, colors: true, logLevel: config.LOG_INFO, autoWatch: true, browsers: ['Chrome'], customLaunchers: { Chrome_travis_ci: { base: 'Chrome', flags: ['--no-sandbox'] } }, singleRun: false }); };<|fim▁end|>
require('karma-chrome-launcher'), require('karma-jasmine-html-reporter'), require('karma-coverage-istanbul-reporter'),
<|file_name|>ViewTech.java<|end_file_name|><|fim▁begin|>package com.ciandt.techgallery.servlets; import java.io.IOException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; <|fim▁hole|> public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { String urlPage = "/viewTech.html"; if(!req.getQueryString().isEmpty()){ urlPage += "?" + req.getQueryString(); } resp.setContentType("text/html"); resp.sendRedirect(urlPage); } }<|fim▁end|>
@SuppressWarnings("serial") public class ViewTech extends HttpServlet { @Override
<|file_name|>BearTestHelper.py<|end_file_name|><|fim▁begin|>import logging logging.warning('This module is deprecated. Use ' '`coalib.testing.BearTestHelper` instead.') <|fim▁hole|><|fim▁end|>
from coalib.testing.BearTestHelper import *
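The row above is coala's module-deprecation shim; a generic sketch of the same pattern follows, with invented module names.

# Generic sketch of the deprecation-shim pattern in the row above; the
# module names old_pkg/new_pkg are placeholders, not real packages.
import logging

logging.warning('This module is deprecated. Use '
                '`new_pkg.helper` instead.')

# Re-export the new module's public names so legacy imports keep working.
from new_pkg.helper import *  # noqa: F401,F403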
<|file_name|>typeobject.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 Daniel Grunwald // // Permission is hereby granted, free of charge, to any person obtaining a copy of this // software and associated documentation files (the "Software"), to deal in the Software // without restriction, including without limitation the rights to use, copy, modify, merge, // publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons // to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or // substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, // INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR // PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE // FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. use python::{Python, PythonObject, ToPythonPointer};<|fim▁hole|>use err::{PyResult, result_from_owned_ptr}; use ffi; /// Represents a reference to a Python type object. pub struct PyType<'p>(PyObject<'p>); pyobject_newtype!(PyType, PyType_Check, PyType_Type); impl <'p> PyType<'p> { /// Retrieves the underlying FFI pointer associated with this Python object. #[inline] pub fn as_type_ptr(&self) -> *mut ffi::PyTypeObject { self.0.as_ptr() as *mut ffi::PyTypeObject } /// Retrieves the PyType instance for the given FFI pointer. /// Undefined behavior if the pointer is NULL or invalid. #[inline] pub unsafe fn from_type_ptr<'a>(py: Python<'p>, p: *mut ffi::PyTypeObject) -> PyType<'p> { PyObject::from_borrowed_ptr(py, p as *mut ffi::PyObject).unchecked_cast_into::<PyType>() } /// Return true if self is a subtype of b. #[inline] pub fn is_subtype_of(&self, b : &PyType<'p>) -> bool { unsafe { ffi::PyType_IsSubtype(self.as_type_ptr(), b.as_type_ptr()) != 0 } } /// Return true if obj is an instance of self. #[inline] pub fn is_instance(&self, obj : &PyObject<'p>) -> bool { unsafe { ffi::PyObject_TypeCheck(obj.as_ptr(), self.as_type_ptr()) != 0 } } /// Calls the type object, thus creating a new instance. /// This is equivalent to the Python expression: `self(*args, **kwargs)` #[inline] pub fn call<A>(&self, args: A, kwargs: Option<&PyDict<'p>>) -> PyResult<'p, PyObject<'p>> where A: ToPyObject<'p, ObjectType=PyTuple<'p>> { let py = self.python(); args.with_borrowed_ptr(py, |args| unsafe { result_from_owned_ptr(py, ffi::PyObject_Call(self.0.as_ptr(), args, kwargs.as_ptr())) }) } } impl <'p> PartialEq for PyType<'p> { #[inline] fn eq(&self, o : &PyType<'p>) -> bool { self.as_type_ptr() == o.as_type_ptr() } } impl <'p> Eq for PyType<'p> { }<|fim▁end|>
use conversion::ToPyObject; use objects::{PyObject, PyTuple, PyDict};
<|file_name|>paramSplitFieldBoth.py<|end_file_name|><|fim▁begin|>''' Created on Oct 3, 2012 Copyright © 2013 The Board of Trustees of The Leland Stanford Junior University. All Rights Reserved Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. @author: dstrauss<|fim▁hole|> D = {'solverType':'splitField', 'flavor':'both', 'numRuns':500, 'expt':'intParameters', 'numProcs':16} def getMyVars(parseNumber, D): '''routine to return the parameters to test at the current iteration.''' rhos, xis = np.meshgrid(np.logspace(2,4,10), np.logspace(-4,-2,10)) rhos = rhos.flatten() xis = xis.flatten() noFreqs = np.array(8) bkg = 0 D['freqs'] = np.round(np.logspace(np.log10(1000), np.log10(50000), noFreqs)) D['inc'] = np.array([45*np.pi/180.0]) D['rho'] = rhos[parseNumber%100] D['xi'] = xis[parseNumber%100] D['bkgNo'] = int(parseNumber/100) + 100 return D<|fim▁end|>
''' import numpy as np
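A quick, hedged check of the run-index decoding in getMyVars above: with numRuns=500, indices 0-499 map to 100 flattened (rho, xi) grid points for each of five backgrounds numbered upward from 100.

# Hedged sanity check of the index decoding in getMyVars above:
# parseNumber % 100 picks one of the 100 flattened (rho, xi) grid points,
# parseNumber // 100 picks the background (offset by 100 in bkgNo).
D = {'solverType': 'splitField', 'flavor': 'both', 'numRuns': 500,
     'expt': 'intParameters', 'numProcs': 16}

D = getMyVars(137, D)
print(D['rho'], D['xi'])  # grid point 37 of the flattened 10x10 mesh
print(D['bkgNo'])         # 1 + 100 = 101 -> second background block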
<|file_name|>ESStoreTest.java<|end_file_name|><|fim▁begin|>package com.winterwell.depot; import java.util.Map; import org.junit.Test; import com.winterwell.depot.merge.Merger; import com.winterwell.es.client.ESConfig; import com.winterwell.es.client.ESHttpClient; import com.winterwell.gson.FlexiGson; import com.winterwell.utils.Dep;<|fim▁hole|>public class ESStoreTest { @Test public void testSimple() { Dep.setIfAbsent(FlexiGson.class, new FlexiGson()); Dep.setIfAbsent(Merger.class, new Merger()); Dep.setIfAbsent(ESConfig.class, new ESConfig()); ESConfig esconfig = Dep.get(ESConfig.class); if ( ! Dep.has(ESHttpClient.class)) Dep.setSupplier(ESHttpClient.class, false, ESHttpClient::new); Dep.setIfAbsent(DepotConfig.class, new DepotConfig()); ArrayMap artifact = new ArrayMap("a", "apple", "b", "bee"); Desc desc = new Desc("test-simple", Map.class); ESDepotStore store = new ESDepotStore(); store.init(); store.put(desc, artifact); Utils.sleep(1500); Object got = store.get(desc); assert Utils.equals(artifact, got) : got; } }<|fim▁end|>
import com.winterwell.utils.Utils; import com.winterwell.utils.containers.ArrayMap;
<|file_name|>Decade.js<|end_file_name|><|fim▁begin|>'use strict'; var babelHelpers = require('./util/babelHelpers.js'); var React = require('react'), _ = require('./util/_'), cx = require('classnames'), dates = require('./util/dates'), localizers = require('./util/configuration').locale, CustomPropTypes = require('./util/propTypes'), Btn = require('./WidgetButton'); var format = function format(props) { return props.yearFormat || localizers.date.formats.year; }; module.exports = React.createClass({ displayName: 'DecadeView', mixins: [require('./mixins/WidgetMixin'), require('./mixins/PureRenderMixin'), require('./mixins/RtlChildContextMixin')], propTypes: { culture: React.PropTypes.string, value: React.PropTypes.instanceOf(Date), focused: React.PropTypes.instanceOf(Date), min: React.PropTypes.instanceOf(Date), max: React.PropTypes.instanceOf(Date), onChange: React.PropTypes.func.isRequired, yearFormat: CustomPropTypes.dateFormat }, render: function render() { var props = _.omit(this.props, ['max', 'min', 'value', 'onChange']), years = getDecadeYears(this.props.focused), rows = _.chunk(years, 4); return React.createElement( 'table',<|fim▁hole|> className: 'rw-calendar-grid rw-nav-view', 'aria-activedescendant': this._id('_selected_item') }), React.createElement( 'tbody', null, rows.map(this._row) ) ); }, _row: function _row(row, i) { var _this = this; var id = this._id('_selected_item'); return React.createElement( 'tr', { key: 'row_' + i, role: 'row' }, row.map(function (date, i) { var focused = dates.eq(date, _this.props.focused, 'year'), selected = dates.eq(date, _this.props.value, 'year'), currentYear = dates.eq(date, _this.props.today, 'year'); return !dates.inRange(date, _this.props.min, _this.props.max, 'year') ? React.createElement( 'td', { key: i, role: 'gridcell', className: 'rw-empty-cell' }, ' ' ) : React.createElement( 'td', { key: i, role: 'gridcell' }, React.createElement( Btn, { onClick: _this.props.onChange.bind(null, date), tabIndex: '-1', id: focused ? id : undefined, 'aria-pressed': selected, 'aria-disabled': _this.props.disabled, disabled: _this.props.disabled || undefined, className: cx({ 'rw-off-range': !inDecade(date, _this.props.focused), 'rw-state-focus': focused, 'rw-state-selected': selected, 'rw-now': currentYear }) }, localizers.date.format(date, format(_this.props), _this.props.culture) ) ); }) ); } }); function inDecade(date, start) { return dates.gte(date, dates.startOf(start, 'decade'), 'year') && dates.lte(date, dates.endOf(start, 'decade'), 'year'); } function getDecadeYears(_date) { var days = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], date = dates.add(dates.startOf(_date, 'decade'), -2, 'year'); return days.map(function () { return date = dates.add(date, 1, 'year'); }); } //require('./mixins/DateFocusMixin')('decade', 'year')<|fim▁end|>
babelHelpers._extends({}, props, { role: 'grid',
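The JavaScript row above shows that a hole does not have to end on a statement boundary: the completion stops mid argument list, inside a babel-helpers call. Any well-formedness check therefore has to run over the reassembled text rather than the completion alone. A rough bracket-balance heuristic follows; it ignores brackets inside strings and comments, so treat it as a coarse filter, not a proof of validity.

def brackets_balanced(text: str) -> bool:
    """Rough check that (), [] and {} nest properly."""
    closers = {")": "(", "]": "[", "}": "{"}
    stack = []
    for ch in text:
        if ch in "([{":
            stack.append(ch)
        elif ch in closers:
            if not stack or stack.pop() != closers[ch]:
                return False
    return not stack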
<|file_name|>test.py<|end_file_name|><|fim▁begin|>"""Tests if it "compiles" (except that it doesn't). Also: trivial usage example. """ from da import Node, Network import topo class MyNode(Node): def run(self): self.send(0, self.ID) self.send(1, self.ID) p, m = self.recv() p, m = self.recv() self.log('terminating', m, p) <|fim▁hole|> if __name__ == '__main__': run(47)<|fim▁end|>
def run(n): net = Network(MyNode, topo.C(n)) net.run()
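For Python rows such as the distributed-algorithms test above, where the hole swallowed the whole run() helper, a cheap structural check is to hand the reassembled source (for example, the output of the reassemble() sketch after the first row) to CPython's parser. This verifies syntax only, and it assumes the row really is Python.

import ast

def parses_ok(source: str) -> bool:
    """True if CPython can parse the spliced-together file."""
    try:
        ast.parse(source)
        return True
    except SyntaxError:
        return False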
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin from .models import User from application.models import (Contact, Personal, Wife, Occupation, Children, Hod, Committee, UserCommittee, Legal) # Register your models here. class ContactInline(admin.StackedInline): model = Contact class PersonalInline(admin.StackedInline): model = Personal class WifeInline(admin.StackedInline): model = Wife class OccupationInline(admin.StackedInline): model = Occupation class HodInline(admin.StackedInline):<|fim▁hole|> class ChildrenInline(admin.StackedInline): model = Children class UserCommitteeInline(admin.StackedInline): model = UserCommittee class UserAdmin(admin.ModelAdmin): inlines = [ ContactInline, PersonalInline, WifeInline, OccupationInline, HodInline, ChildrenInline, UserCommitteeInline ] class LegalAdmin(admin.ModelAdmin): model = Legal admin.site.register(User, UserAdmin) admin.site.register(Legal, LegalAdmin) admin.site.site_header = 'Hebrew Order of David Administration'<|fim▁end|>
model = Hod
<|file_name|>add_to_a_path.py<|end_file_name|><|fim▁begin|>"""Script to display a collection of paths after inserting one new path Usage: add_to_a_path.py [-U] PATHS PATH add_to_a_path.py [-U] (-s | -i INDEX ) PATHS PATH Options: -h, --help Show this help and exit -v, --version Show version number and exit -s, --start Add the path at start of list of paths -i INDEX, --index=INDEX The index at which the path will be inserted Examples of use: $ export PATH=/bin:/usr/bin $ add_to_a_path.py PATH /usr/local/bin PATH=/bin:/usr/bin:/usr/local/bin $ add_to_a_path.py PATH /usr/local/bin --start PATH=/usr/local/bin:/bin:/usr/bin """ from __future__ import print_function<|fim▁hole|>import sys import argparse from bdb import BdbQuit __version__ = '0.1.0' class ScriptError(NotImplementedError): pass def version(): print('%s %s' % (args, __version__)) raise SystemExit def parse_args(): """Parse out command line arguments""" parser = argparse.ArgumentParser(description=__doc__.splitlines()[0]) parser.add_argument('symbol', help='The bash symbol to be changed') parser.add_argument('path', help='The path to be added') parser.add_argument('-s', '--start', action='store_true', help='Add the path at start of list of paths') parser.add_argument('-i', '--index', type=int, help='The index at which the path will be inserted') parser.add_argument('-v', '--version', action='store_true', help='Show version') args = parser.parse_args() if args.version: version() if not args.index: if args.start: args.index = 0 else: args.index = False return args def _add_symbol_to_paths(paths, symbol, i): if i is False: i = len(paths) result = paths[:] if not symbol: return result if symbol not in result: result.insert(i, symbol) return result j = result.index(symbol) if i != j: del result[j] result.insert(i, symbol) return result def get_arg_path(args): path = args.path if not path: return '' user_path = os.path.expanduser(path) real_path = os.path.realpath(user_path) if not os.path.isdir(real_path): return '' return real_path def split_paths(string): if not string: return [] return [p for p in string.split(os.path.pathsep) if p] def get_paths(args): symbol = args.symbol paths_string = '' if symbol in os.environ: paths_string = os.environ[symbol] elif os.path.pathsep in symbol: paths_string = symbol return split_paths(paths_string) def script(args): arg_path = get_arg_path(args) paths = get_paths(args) if not arg_path: if not paths: return False elif os.path.isdir(arg_path): if arg_path in paths: paths.remove(arg_path) paths = _add_symbol_to_paths(paths, arg_path, args.index) else: return False print('='.join((args.symbol, os.path.pathsep.join(paths)))) return True def main(): """Run the script""" try: args = parse_args() return os.EX_OK if script(args) else not os.EX_OK except (SystemExit, BdbQuit): pass return os.EX_OK if __name__ == '__main__': sys.exit(main())<|fim▁end|>
import os
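Here the held-out span is a single missing import, but the script itself is complete, so its core helper can be exercised directly. A short usage sketch: the module name in the import is assumed from the file name, and the expected lists follow from the insert/dedupe logic visible in the file.

from add_to_a_path import _add_symbol_to_paths  # module name assumed

paths = ["/bin", "/usr/bin"]
# i=False means "append at the end", matching the default behaviour
assert _add_symbol_to_paths(paths, "/usr/local/bin", False) == [
    "/bin", "/usr/bin", "/usr/local/bin"]
# i=0 matches the --start option: insert at the front
assert _add_symbol_to_paths(paths, "/usr/local/bin", 0) == [
    "/usr/local/bin", "/bin", "/usr/bin"]
# re-adding an entry already at the target index leaves the list unchanged
assert _add_symbol_to_paths(paths, "/bin", 0) == ["/bin", "/usr/bin"]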
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Networking primitives for TCP/UDP communication. #![stable(feature = "rust1", since = "1.0.0")] use prelude::v1::*; use io::{self, Error, ErrorKind}; use sys_common::net as net_imp; pub use self::ip::{IpAddr, Ipv4Addr, Ipv6Addr, Ipv6MulticastScope}; pub use self::addr::{SocketAddr, SocketAddrV4, SocketAddrV6, ToSocketAddrs}; pub use self::tcp::{TcpStream, TcpListener, Incoming}; pub use self::udp::UdpSocket; pub use self::parser::AddrParseError; mod ip; mod addr; mod tcp; mod udp; mod parser; #[cfg(test)] mod test; /// Possible values which can be passed to the `shutdown` method of `TcpStream`. #[derive(Copy, Clone, PartialEq, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub enum Shutdown { /// Indicates that the reading portion of this stream/socket should be shut /// down. All currently blocked and future reads will return `Ok(0)`. #[stable(feature = "rust1", since = "1.0.0")] Read, /// Indicates that the writing portion of this stream/socket should be shut /// down. All currently blocked and future writes will return an error. #[stable(feature = "rust1", since = "1.0.0")] Write, /// Shut down both the reading and writing portions of this stream. /// /// See `Shutdown::Read` and `Shutdown::Write` for more information. #[stable(feature = "rust1", since = "1.0.0")] Both, } #[doc(hidden)] trait NetInt { fn from_be(i: Self) -> Self; fn to_be(&self) -> Self; } macro_rules! doit { ($($t:ident)*) => ($(impl NetInt for $t { fn from_be(i: Self) -> Self { <$t>::from_be(i) } fn to_be(&self) -> Self { <$t>::to_be(*self) } })*) } doit! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize } fn hton<I: NetInt>(i: I) -> I { i.to_be() } fn ntoh<I: NetInt>(i: I) -> I { I::from_be(i) } fn each_addr<A: ToSocketAddrs, F, T>(addr: A, mut f: F) -> io::Result<T> where F: FnMut(&SocketAddr) -> io::Result<T> { let mut last_err = None; for addr in try!(addr.to_socket_addrs()) { match f(&addr) { Ok(l) => return Ok(l), Err(e) => last_err = Some(e), } } Err(last_err.unwrap_or_else(|| { Error::new(ErrorKind::InvalidInput, "could not resolve to any addresses") })) } /// An iterator over `SocketAddr` values returned from a host lookup operation. #[unstable(feature = "lookup_host", reason = "unsure about the returned \ iterator and returning socket \ addresses", issue = "27705")] pub struct LookupHost(net_imp::LookupHost); #[unstable(feature = "lookup_host", reason = "unsure about the returned \ iterator and returning socket \ addresses", issue = "27705")] impl Iterator for LookupHost { type Item = io::Result<SocketAddr>; fn next(&mut self) -> Option<io::Result<SocketAddr>> { self.0.next() } } /// Resolve the host specified by `host` as a number of `SocketAddr` instances. /// /// This method may perform a DNS query to resolve `host` and may also inspect /// system configuration to resolve the specified hostname. 
/// /// # Examples /// /// ```no_run /// #![feature(lookup_host)] /// /// use std::net; /// /// # fn foo() -> std::io::Result<()> { /// for host in try!(net::lookup_host("rust-lang.org")) { /// println!("found address: {}", try!(host));<|fim▁hole|>/// # } /// ``` #[unstable(feature = "lookup_host", reason = "unsure about the returned \ iterator and returning socket \ addresses", issue = "27705")] pub fn lookup_host(host: &str) -> io::Result<LookupHost> { net_imp::lookup_host(host).map(LookupHost) } /// Resolve the given address to a hostname. /// /// This function may perform a DNS query to resolve `addr` and may also inspect /// system configuration to resolve the specified address. If the address /// cannot be resolved, it is returned in string format. #[unstable(feature = "lookup_addr", reason = "recent addition", issue = "27705")] pub fn lookup_addr(addr: &IpAddr) -> io::Result<String> { net_imp::lookup_addr(addr) }<|fim▁end|>
/// } /// # Ok(())
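In the Rust standard-library row above the hole falls entirely inside a doc comment, so what the completion restores is documentation rather than executable code. A crude way to flag such rows when curating the dump; it is prefix-based only, so string literals containing /// would fool it.

def doc_comment_only(completion: str) -> bool:
    """Heuristic: every non-blank completion line is a Rust doc comment."""
    lines = [line.strip() for line in completion.splitlines() if line.strip()]
    return bool(lines) and all(line.startswith(("///", "//!")) for line in lines)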
<|file_name|>resource.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import os.path import shutil import tempfile import unittest from StringIO import StringIO from datetime import datetime import trac.ticket.api import trac.ticket.report import trac.ticket.roadmap import trac.wiki.api from trac import resource from trac.attachment import Attachment from trac.ticket.model import Ticket from trac.util.datefmt import utc from trac.wiki.model import WikiPage from multiproduct.api import MultiProductSystem from multiproduct.env import ProductEnvironment from tests.env import MultiproductTestCase class ProductResourceTestCase(MultiproductTestCase): def setUp(self): self._mp_setup() self.global_env = self.env self._load_product_from_data(self.global_env, u'xü') self.env = ProductEnvironment(self.global_env, self.default_product) self.env1 = ProductEnvironment(self.global_env, u'xü') self._load_default_data(self.global_env) self._load_default_data(self.env1) # Enable product system component in product context self.env.enable_component(MultiProductSystem) def tearDown(self): self.global_env.reset_db() self.global_env = self.env = None class ProductAttachmentResourceTestCase(ProductResourceTestCase): def setUp(self): ProductResourceTestCase.setUp(self) self.global_env.path = os.path.join(tempfile.gettempdir(), 'trac-tempenv') if os.path.exists(self.global_env.path): shutil.rmtree(self.global_env.path) os.mkdir(self.global_env.path) attachment = Attachment(self.global_env, 'ticket', 1) attachment.description = 'Global Bar' attachment.insert('foo.txt', StringIO(''), 0) attachment = Attachment(self.env1, 'ticket', 1) attachment.description = 'Product Bar' attachment.insert('foo.txt', StringIO(''), 0) self.resource = resource.Resource('ticket', 1).child('attachment', 'foo.txt') def tearDown(self): shutil.rmtree(self.global_env.path) ProductResourceTestCase.tearDown(self) def test_global_neighborhood_attachments(self): target = resource.Neighborhood('global', None).child(self.resource) self.assertEquals("[global:] Attachment 'foo.txt' in [global:] Ticket #1", resource.get_resource_description(self.env, target)) self.assertEquals("[global:] Attachment 'foo.txt' in [global:] Ticket #1", resource.get_resource_name(self.env, target)) self.assertEquals("[global:] foo.txt ([global:] Ticket #1)", resource.get_resource_shortname(self.env, target)) self.assertEquals('Global Bar', resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/attachment/ticket/1/foo.txt', resource.get_resource_url(self.env, target, self.env.href)) def test_product_neighborhood_attachments(self): target = resource.Neighborhood('product', u'xü').child(self.resource) self.assertEquals(u"[product:xü] Attachment 'foo.txt' in 
[product:xü] Ticket #1", resource.get_resource_description(self.env, target)) self.assertEquals(u"[product:xü] Attachment 'foo.txt' in [product:xü] Ticket #1", resource.get_resource_name(self.env, target)) self.assertEquals(u"[product:xü] foo.txt ([product:xü] Ticket #1)", resource.get_resource_shortname(self.env, target)) self.assertEquals('Product Bar', resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/products/x%C3%BC/attachment/ticket/1/foo.txt', resource.get_resource_url(self.env, target, self.env.href)) class ProductMilestoneResourceTestCase(ProductResourceTestCase): resource = resource.Resource('milestone', 'milestone1') def test_global_neighborhood_milestone(self): target = resource.Neighborhood('global', None).child(self.resource) self.assertEquals("[global:] Milestone milestone1", resource.get_resource_description(self.env, target)) self.assertEquals("[global:] Milestone milestone1", resource.get_resource_name(self.env, target)) self.assertEquals("milestone1", resource.get_resource_shortname(self.env, target)) self.assertEquals("[global:] Milestone milestone1", resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/milestone/milestone1', resource.get_resource_url(self.env, target, self.env.href)) def test_product_neighborhood_milestone(self): target = resource.Neighborhood('product', u'xü').child(self.resource) self.assertEquals(u"[product:xü] Milestone milestone1", resource.get_resource_description(self.env, target)) self.assertEquals(u"[product:xü] Milestone milestone1", resource.get_resource_name(self.env, target)) self.assertEquals(u"milestone1", resource.get_resource_shortname(self.env, target)) self.assertEquals(u"[product:xü] Milestone milestone1", resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/products/x%C3%BC/milestone/milestone1', resource.get_resource_url(self.env, target, self.env.href)) # FIXME: No resource manager for reports in core ? 
class ProductReportResourceTestCase(ProductResourceTestCase): resource = resource.Resource('report', 1) def test_global_neighborhood_report(self): target = resource.Neighborhood('global', None).child(self.resource) self.assertEquals("[global:] report:1", resource.get_resource_description(self.env, target)) self.assertEquals("[global:] report:1", resource.get_resource_name(self.env, target)) self.assertEquals("[global:] report:1", resource.get_resource_shortname(self.env, target)) self.assertEquals('[global:] report:1 at version None', resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/report/1', resource.get_resource_url(self.env, target, self.env.href)) def test_product_neighborhood_report(self): target = resource.Neighborhood('product', u'xü').child(self.resource) self.assertEquals(u"[product:xü] report:1", resource.get_resource_description(self.env, target)) self.assertEquals(u"[product:xü] report:1", resource.get_resource_name(self.env, target)) self.assertEquals(u"[product:xü] report:1", resource.get_resource_shortname(self.env, target)) self.assertEquals(u"[product:xü] report:1 at version None", resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/products/x%C3%BC/report/1', resource.get_resource_url(self.env, target, self.env.href)) class ProductTicketResourceTestCase(ProductResourceTestCase): def _new_ticket(self, env, ticket_dict): ticket = Ticket(env) ticket.populate(ticket_dict) return ticket.insert() def setUp(self): ProductResourceTestCase.setUp(self) def test_global_neighborhood_ticket(self): nbh = resource.Neighborhood('global', None) data = dict(summary='Ticket summary', description='Ticket description', type='enhancement', status='new') target = nbh.child('ticket', self._new_ticket(self.global_env, data)) self.assertEquals("[global:] Ticket #1", resource.get_resource_description(self.env, target)) self.assertEquals("[global:] Ticket #1", resource.get_resource_name(self.env, target)) self.assertEquals("[global:] #1", resource.get_resource_shortname(self.env, target)) self.assertEquals('enhancement: Ticket summary (new)', resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/ticket/1', resource.get_resource_url(self.env, target, self.env.href)) def test_product_neighborhood_ticket(self): nbh = resource.Neighborhood('product', u'xü') data = dict(summary='Ticket summary', description='Ticket description', type='task', status='accepted') target = nbh.child('ticket', self._new_ticket(self.env1, data)) self.assertEquals(u"[product:xü] Ticket #1", resource.get_resource_description(self.env, target)) self.assertEquals(u"[product:xü] Ticket #1", resource.get_resource_name(self.env, target)) self.assertEquals(u"[product:xü] #1", resource.get_resource_shortname(self.env, target)) self.assertEquals(u"task: Ticket summary (accepted)", resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/products/x%C3%BC/ticket/1', resource.get_resource_url(self.env, target, self.env.href)) class ProductWikiResourceTestCase(ProductResourceTestCase): resource = resource.Resource('wiki', 'TestPage', version=2) def setUp(self): ProductResourceTestCase.setUp(self) page = WikiPage(self.global_env) page.name = 'TestPage' page.text = 'Bla bla' t = datetime(2001, 1, 1, 1, 1, 1, 0, utc) page.save('joe', 'Testing global', '::1', t) page.text = 'Bla bla bla' t = datetime(2002, 2, 2, 2, 2, 2, 0, utc) page.save('joe', 'Testing global 2', '::1', t) 
page = WikiPage(self.env1) page.name = 'TestPage' page.text = 'alb alB' t = datetime(2011, 1, 1, 1, 1, 1, 0, utc) page.save('mary', 'Testing product', '::1', t) page.text = 'Bla bla bla' t = datetime(2012, 2, 2, 2, 2, 2, 0, utc) page.save('mary', 'Testing product 2', '::1', t) def test_global_neighborhood_wiki(self): target = resource.Neighborhood('global', None).child(self.resource) <|fim▁hole|> resource.get_resource_name(self.env, target)) self.assertEquals("TestPage", resource.get_resource_shortname(self.env, target)) self.assertEquals("TestPage", resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/wiki/TestPage?version=2', resource.get_resource_url(self.env, target, self.env.href)) def test_product_neighborhood_wiki(self): target = resource.Neighborhood('product', u'xü').child(self.resource) self.assertEquals(u"TestPage", resource.get_resource_description(self.env, target)) self.assertEquals(u"TestPage", resource.get_resource_name(self.env, target)) self.assertEquals(u"TestPage", resource.get_resource_shortname(self.env, target)) self.assertEquals(u"TestPage", resource.get_resource_summary(self.env, target)) self.assertEquals('http://example.org/trac.cgi/products/x%C3%BC/wiki/TestPage?version=2', resource.get_resource_url(self.env, target, self.env.href)) class NeighborhoodTestCase(MultiproductTestCase): def setUp(self): self._mp_setup() def test_get_known_neighborhoods(self): rsys = resource.ResourceSystem(self.env) self.assertEquals(['global', 'product'], sorted(rsys.get_known_neighborhoods())) def test_suite(): return unittest.TestSuite([ unittest.makeSuite(ProductAttachmentResourceTestCase, 'test'), unittest.makeSuite(ProductMilestoneResourceTestCase, 'test'), unittest.makeSuite(ProductReportResourceTestCase, 'test'), unittest.makeSuite(ProductTicketResourceTestCase, 'test'), unittest.makeSuite(ProductWikiResourceTestCase, 'test'), unittest.makeSuite(NeighborhoodTestCase, 'test'), ]) if __name__ == '__main__': unittest.main(defaultTest='test_suite')<|fim▁end|>
self.assertEquals("TestPage", resource.get_resource_description(self.env, target)) self.assertEquals("TestPage",
<|file_name|>ViewDelegatesFactory.cpp<|end_file_name|><|fim▁begin|>#include "ViewDelegatesFactory.h" #include "CategoryViewDelegate.h" #include "ClientViewDelegate.h" ViewDelegatesFactory* ViewDelegatesFactory:: ViewDelegatesFactory_ = nullptr; ViewDelegatesFactory::ViewDelegatesFactory( QObject * parent) { oAvail["Category"] = new CategoryViewDelegate(parent); oAvail["Client"] = new ClientViewDelegate(parent); } ViewDelegates* ViewDelegatesFactory::Get(QString ViewName, QObject * parent) { if (ViewDelegatesFactory_ == nullptr) ViewDelegatesFactory_ = new ViewDelegatesFactory(parent); return ViewDelegatesFactory_->oAvail[ViewName];<|fim▁hole|>ViewDelegatesFactory:: ~ViewDelegatesFactory() { for(auto it = oAvail.begin(); it != oAvail.end(); ++it) delete it.value(); delete ViewDelegatesFactory_; }<|fim▁end|>
}
<|file_name|>theme.rs<|end_file_name|><|fim▁begin|>extern crate nanovg;<|fim▁hole|> pub struct Theme { font_normal: Option<nanovg::Font>, font_bold: Option<nanovg::Font>, font_icons: Option<nanovg::Font>, standard_font_size: u32, button_font_size: u32, textbox_font_size: u32, window_corner_radius: u32, window_header_height: u32, window_dropshadow_size: u32, button_corner_radius: u32, dropshadow: Color, transparent: Color, border_dark: Color, border_light: Color, border_medium: Color, text_color: Color, disabled_text_color: Color, text_color_shadow: Color, icon_color: Color, button_gradient_top_focused: Color, button_gradient_bot_focused: Color, button_gradient_top_unfocused: Color, button_gradient_bot_unfocused: Color, button_gradient_top_pushed: Color, button_gradient_bot_pushed: Color, window_fill_unfocused: Color, window_fill_focused: Color, window_title_unfocused: Color, window_title_focused: Color, window_header_gradient_top: Color, window_header_gradient_bot: Color, window_header_sep_top: Color, window_header_sep_bot: Color, window_popup: Color, window_popup_transparent: Color, } impl Theme { pub fn new(nanovg_context: &nanovg::Context) -> Theme { Theme { font_normal: Some(nanovg_context.create_font_mem("sans", resources::SANS_FONT).unwrap()), font_bold: Some(nanovg_context.create_font_mem("sans-bold", resources::SANS_BOLD_FONT).unwrap()), font_icons: Some(nanovg_context.create_font_mem("icons", resources::SANS_ICONS_FONT).unwrap()), standard_font_size: 16, button_font_size: 20, textbox_font_size: 20, window_corner_radius: 2, window_header_height: 30, window_dropshadow_size: 10, button_corner_radius: 2, dropshadow: Color::from_intensity(0f32, 128f32), transparent: Color::from_intensity(0f32, 0f32), border_dark: Color::from_intensity(29f32, 255f32), border_light: Color::from_intensity(92f32, 255f32), border_medium: Color::from_intensity(35f32, 255f32), text_color: Color::from_intensity(255f32, 160f32), disabled_text_color: Color::from_intensity(255f32, 80f32), text_color_shadow: Color::from_intensity(0f32, 160f32), icon_color: Color::from_intensity(255f32, 160f32), button_gradient_top_focused: Color::from_intensity(64f32, 255f32), button_gradient_bot_focused: Color::from_intensity(48f32, 255f32), button_gradient_top_unfocused: Color::from_intensity(74f32, 255f32), button_gradient_bot_unfocused: Color::from_intensity(58f32, 255f32), button_gradient_top_pushed: Color::from_intensity(41f32, 255f32), button_gradient_bot_pushed: Color::from_intensity(29f32, 255f32), window_fill_unfocused: Color::from_intensity(43f32, 230f32), window_fill_focused: Color::from_intensity(45f32, 230f32), window_title_unfocused: Color::from_intensity(220f32, 160f32), window_title_focused: Color::from_intensity(255f32, 190f32), window_header_gradient_top: Color::from_intensity(74f32, 255f32), window_header_gradient_bot: Color::from_intensity(58f32, 255f32), window_header_sep_top: Color::from_intensity(92f32, 255f32), window_header_sep_bot: Color::from_intensity(29f32, 255f32), window_popup: Color::from_intensity(50f32, 255f32), window_popup_transparent: Color::from_intensity(50f32, 0f32), } } pub fn new_debug() -> Theme { Theme { font_normal: None, font_bold: None, font_icons: None, standard_font_size: 16, button_font_size: 20, textbox_font_size: 20, window_corner_radius: 2, window_header_height: 30, window_dropshadow_size: 10, button_corner_radius: 2, dropshadow: Color::from_intensity(0f32, 128f32), transparent: Color::from_intensity(0f32, 0f32), border_dark: Color::from_intensity(29f32, 255f32), border_light: 
Color::from_intensity(92f32, 255f32), border_medium: Color::from_intensity(35f32, 255f32), text_color: Color::from_intensity(255f32, 160f32), disabled_text_color: Color::from_intensity(255f32, 80f32), text_color_shadow: Color::from_intensity(0f32, 160f32), icon_color: Color::from_intensity(255f32, 160f32), button_gradient_top_focused: Color::from_intensity(64f32, 255f32), button_gradient_bot_focused: Color::from_intensity(48f32, 255f32), button_gradient_top_unfocused: Color::from_intensity(74f32, 255f32), button_gradient_bot_unfocused: Color::from_intensity(58f32, 255f32), button_gradient_top_pushed: Color::from_intensity(41f32, 255f32), button_gradient_bot_pushed: Color::from_intensity(29f32, 255f32), window_fill_unfocused: Color::from_intensity(43f32, 230f32), window_fill_focused: Color::from_intensity(45f32, 230f32), window_title_unfocused: Color::from_intensity(220f32, 160f32), window_title_focused: Color::from_intensity(255f32, 190f32), window_header_gradient_top: Color::from_intensity(74f32, 255f32), window_header_gradient_bot: Color::from_intensity(58f32, 255f32), window_header_sep_top: Color::from_intensity(92f32, 255f32), window_header_sep_bot: Color::from_intensity(29f32, 255f32), window_popup: Color::from_intensity(50f32, 255f32), window_popup_transparent: Color::from_intensity(50f32, 0f32), } } pub fn font_normal(&self) -> &nanovg::Font { match self.font_normal { Some(ref val) => val, None => panic!("debug mode") } } pub fn set_font_normal(&mut self, new_val: nanovg::Font) { self.font_normal = Some(new_val); } pub fn font_bold(&self) -> &nanovg::Font { match self.font_bold { Some(ref val) => val, None => panic!("debug mode") } } pub fn set_font_bold(&mut self, new_val: nanovg::Font) { self.font_bold = Some(new_val); } pub fn font_icons(&self) -> &nanovg::Font { match self.font_icons { Some(ref val) => val, None => panic!("debug mode") } } pub fn set_font_icons(&mut self, new_val: nanovg::Font) { self.font_icons = Some(new_val); } impl_get_set!(standard_font_size, u32); impl_get_set!(button_font_size, u32); impl_get_set!(textbox_font_size, u32); impl_get_set!(window_corner_radius, u32); impl_get_set!(window_header_height, u32); impl_get_set!(window_dropshadow_size, u32); impl_get_set!(button_corner_radius, u32); impl_get_set!(dropshadow, Color); impl_get_set!(transparent, Color); impl_get_set!(border_dark, Color); impl_get_set!(border_light, Color); impl_get_set!(border_medium, Color); impl_get_set!(text_color, Color); impl_get_set!(disabled_text_color, Color); impl_get_set!(text_color_shadow, Color); impl_get_set!(icon_color, Color); impl_get_set!(button_gradient_top_focused, Color); impl_get_set!(button_gradient_bot_focused, Color); impl_get_set!(button_gradient_top_unfocused, Color); impl_get_set!(button_gradient_bot_unfocused, Color); impl_get_set!(button_gradient_top_pushed, Color); impl_get_set!(button_gradient_bot_pushed, Color); impl_get_set!(window_fill_unfocused, Color); impl_get_set!(window_fill_focused, Color); impl_get_set!(window_title_unfocused, Color); impl_get_set!(window_title_focused, Color); impl_get_set!(window_header_gradient_top, Color); impl_get_set!(window_header_gradient_bot, Color); impl_get_set!(window_header_sep_top, Color); impl_get_set!(window_header_sep_bot, Color); impl_get_set!(window_popup, Color); impl_get_set!(window_popup_transparent, Color); }<|fim▁end|>
use common::{Color}; use resources;
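As in the Java test at the top of this section, the span held out of this Rust theme file is nothing but use declarations. Below is a quick way to tag import-only completions across the languages present here; the regular expression is deliberately loose, so it will both miss exotic forms and misfire on prose that happens to start with these keywords.

import re

IMPORT_RE = re.compile(r"^(import\b|use\b|from\b.+\bimport\b|#include\b)")

def import_only(completion: str) -> bool:
    """Heuristic: every non-blank completion line is an import/use/include."""
    lines = [line.strip() for line in completion.splitlines() if line.strip()]
    return bool(lines) and all(IMPORT_RE.match(line) for line in lines)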
<|file_name|>scrollable.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk. // // rgtk is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // rgtk is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with rgtk. If not, see <http://www.gnu.org/licenses/>. //! An interface for scrollable widgets use gtk::cast::GTK_SCROLLABLE; use gtk::{mod, ffi}; /// GtkScrollable — An interface for scrollable widgets pub trait ScrollableTrait: gtk::WidgetTrait { fn get_hadjustment(&self) -> gtk::Adjustment { unsafe { gtk::Adjustment::wrap_pointer(ffi::gtk_scrollable_get_hadjustment(GTK_SCROLLABLE(self.get_widget()))) } } fn set_hadjustment(&mut self, hadjustment: gtk::Adjustment) { unsafe { ffi::gtk_scrollable_set_hadjustment(GTK_SCROLLABLE(self.get_widget()), hadjustment.get_pointer()) } } fn get_vadjustment(&self) -> gtk::Adjustment { unsafe { gtk::Adjustment::wrap_pointer(ffi::gtk_scrollable_get_vadjustment(GTK_SCROLLABLE(self.get_widget()))) } } fn set_vadjustment(&mut self, vadjustment: gtk::Adjustment) { unsafe { ffi::gtk_scrollable_set_vadjustment(GTK_SCROLLABLE(self.get_widget()), vadjustment.get_pointer()) } } fn get_hscroll_policy(&self) -> gtk::ScrollablePolicy { unsafe { ffi::gtk_scrollable_get_hscroll_policy(GTK_SCROLLABLE(self.get_widget())) } } fn set_hscroll_policy(&mut self, policy: gtk::ScrollablePolicy) { unsafe { ffi::gtk_scrollable_set_hscroll_policy(GTK_SCROLLABLE(self.get_widget()),<|fim▁hole|> fn get_vscroll_policy(&self) -> gtk::ScrollablePolicy { unsafe { ffi::gtk_scrollable_get_vscroll_policy(GTK_SCROLLABLE(self.get_widget())) } } fn set_vscroll_policy(&mut self, policy: gtk::ScrollablePolicy) { unsafe { ffi::gtk_scrollable_set_vscroll_policy(GTK_SCROLLABLE(self.get_widget()), policy) } } }<|fim▁end|>
policy) } }
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import user_passes_test from django.contrib.auth.models import Group def group_required(names, login_url=None): """ Checks if the user is a member of a particular group (or at least one group from the list)<|fim▁hole|> names = [names] return user_passes_test(lambda u: u.groups.filter(name__in=names), login_url=login_url)<|fim▁end|>
""" if not hasattr(names,'__iter__'):
<|file_name|>eps.hpp<|end_file_name|><|fim▁begin|>//============================================================================== // Copyright 2003 - 2011 LASMEA UMR 6602 CNRS/Univ. Clermont II // Copyright 2009 - 2011 LRI UMR 8623 CNRS/Univ Paris Sud XI // // Distributed under the Boost Software License, Version 1.0. // See accompanying file LICENSE.txt or copy at // http://www.boost.org/LICENSE_1_0.txt //============================================================================== #ifndef NT2_IEEE_FUNCTIONS_EPS_HPP_INCLUDED #define NT2_IEEE_FUNCTIONS_EPS_HPP_INCLUDED #include <boost/simd/ieee/include/functions/eps.hpp> #include <nt2/include/functor.hpp> /* Automatically generated for module core.base */<|fim▁hole|> namespace nt2 { namespace tag { #ifdef DOXYGEN_ONLY /*! \brief Same as \classref{boost::simd::tag::eps_} **/ struct eps_ {}; #endif using boost::simd::tag::eps_; } #ifdef DOXYGEN_ONLY /*! \brief Same as \funcref{boost::simd::eps} **/ template<class... Args> details::unspecified eps(Args&&... args); #endif using boost::simd::eps; } #endif<|fim▁end|>
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from django.test import TestCase as DjangoTestCase from django.conf import settings from seeder.models import * from seeder.posters import TwitterPoster from random import randint as random from datetime import datetime import time import mox import re def generate_random_authorized_account(): u = User(username = "foo" + str(random(10000, 99999))) u.save() return AuthorizedAccount.objects.create(user = u) def generate_random_seeder(account = None): if account is None: account = generate_random_authorized_account() return Seeder.objects.create( twitter_id = random(1000, 9999), authorized_for = account ) def generate_random_token(seeder = None): if seeder is None: seeder = generate_random_seeder() return Token.objects.create( seeder = seeder, oauth_token = "some token" + str(random(10, 100)), oauth_token_secret = "some token secret" + str(random(10, 100)) ) def generate_random_update(account = None): if account is None: account = generate_random_authorized_account() return Update.objects.create( posted_by = account, original_text = "Hello from Seeder!" ) def generate_mock_poster(update): poster = mox.MockObject(TwitterPoster) poster.post(update) mox.Replay(poster) return poster class TestCase(DjangoTestCase): def assertPubDateBetween(self, obj, begin, end): self.assertTrue(obj.pub_date > begin and obj.pub_date < end) def tearDown(self): models = (AuthorizedAccount, Token, Seeder, Update, SeededUpdate,) for model in models: [obj.delete() for obj in model.objects.all()] class TestOfSeededUpate(TestCase): def test_has_a_future_timestamp(self): foo = SeededUpdate.objects.create( seeder = generate_random_seeder(), update = generate_random_update() ) self.assertTrue(datetime.now() < foo.pub_date) def test_retrieves_updates_based_on_availability(self): first = SeededUpdate.objects.create( seeder = generate_random_seeder(), update = generate_random_update(), pub_date = datetime.now() ) second = SeededUpdate.objects.create( seeder = generate_random_seeder(), update = generate_random_update(), pub_date = datetime.fromtimestamp(time.time() + 1) ) self.assertEqual(1, len(SeededUpdate.objects.currently_available())) time.sleep(1.1) self.assertEqual(2, len(SeededUpdate.objects.currently_available())) def test_retrieves_updates_that_havenot_been_sent(self): first = SeededUpdate.objects.create( seeder = generate_random_seeder(), update = generate_random_update(), pub_date = datetime.now() ) second = SeededUpdate.objects.create( seeder = generate_random_seeder(), update = generate_random_update(), pub_date = datetime.now() ) self.assertEqual(2, len(SeededUpdate.objects.currently_available())) first.has_sent = 1; first.save() self.assertEqual(1, len(SeededUpdate.objects.currently_available())) def test_send_calls_on_poster(self): update = SeededUpdate.objects.create( seeder = generate_random_seeder(), update = generate_random_update() ) poster = generate_mock_poster(update) update.send(poster) mox.Verify(poster) def test_send_marks_updates_as_sent(self): update = SeededUpdate.objects.create( seeder = generate_random_seeder(), update = generate_random_update(), pub_date = datetime.now() ) self.assertEqual(len(SeededUpdate.objects.currently_available()), 1, "sanity check to ensure value seeded update is present") update.send(generate_mock_poster(update)) self.assertEqual(len(SeededUpdate.objects.currently_available()), 0, "SeededUpdate should not be available after being sent") class TestOfUpdate(TestCase): def test_creates_seeded_updates_on_save(self): # 
sanity check self.assertEqual(0, len(SeededUpdate.objects.all())) a = generate_random_authorized_account() [generate_random_seeder(a) for i in range(10)] update = Update.objects.create( posted_by = a, original_text = "Hello from Seeder!" ) self.assertEqual(10, len(SeededUpdate.objects.all())) def test_all_seeded_updates_have_pub_dates_between_1_and_30_minutes(self): a = generate_random_authorized_account() generate_random_seeder(a) update = Update.objects.create( posted_by = a, original_text = "Hello from Seeder!" ) seeded_update = SeededUpdate.objects.get(update = update) # only uses 59 seconds to avoid possible race condition where # more than a second elapses between creation and the time this # test runs begin_datetime = datetime.fromtimestamp(time.time() + 59) end_datetime = datetime.fromtimestamp(time.time() + (60 * 30) + 1) self.assertPubDateBetween(seeded_update, begin_datetime, end_datetime) def test_only_creates_new_seeded_updates_on_new(self): a = generate_random_authorized_account() generate_random_seeder(a) update = generate_random_update(a) self.assertEqual(len(SeededUpdate.objects.all()), 1, "Sanity check") update.save() self.assertEqual(len(SeededUpdate.objects.all()), 1, "Should only create SeededUpdates on save when new") def test_only_creates_for_non_expired_seeders(self): a = generate_random_authorized_account() s1 = generate_random_seeder(a) s2 = generate_random_seeder(a) s2.set_expires_on_in_days(-1) s2.save() update = generate_random_update(a) self.assertEquals(len(SeededUpdate.objects.all()), 1, "should only create one SeededUpdate since on has expired") class TestOfAuthorizedAccount(TestCase): def test_default_account_returns_default_account(self): a = generate_random_authorized_account() a.twitter_id = settings.SEEDER['default_twitter_id'] a.save() default_account = AuthorizedAccount.objects.default_account() self.assertEqual(settings.SEEDER['default_twitter_id'], default_account.twitter_id) def test_only_pulls_seeders_that_have_not_expired(self): a = generate_random_authorized_account() s = generate_random_seeder(a) self.assertEquals(len(a.seeder_set.currently_available()), 1, "sanity check: seeder_set.currently_available() should be one") s.expires_on = datetime.fromtimestamp(time.time() - 60) s.save() self.assertEquals(len(a.seeder_set.currently_available()), 0, "seeder_set.currently_available() should have no seeders") class TestOfSeeder(TestCase): def test_automatically_expires_in_30_days(self): seeder = generate_random_seeder() expected_expires_on = datetime.fromtimestamp(time.time() + 60*60*24*30).date() self.assertEquals(seeder.expires_on.date(), expected_expires_on, "seeder.expires_on should default to 30 days") def test_can_set_by_expires_by_day(self): seeder = generate_random_seeder() seeder.set_expires_on_in_days(7) self.assertEquals(seeder.expires_on.date(), datetime.fromtimestamp(time.time() + 60*60*24*7).date(), "seeder.expires_on should be 7 days in the future") def test_can_take_a_string_as_parameter(self): seeder = generate_random_seeder() try: seeder.set_expires_on_in_days("7") except TypeError: self.fail("seeder.set_expires_on_in_days() unable to handle a string") def generate_mock_settings(): return mox.MockObject(settings) class StubTwitterApi(object): number_of_calls = 0 calls = [] def __init__(self, *args, **kwargs): StubTwitterApi.number_of_calls += 1 def __getattribute__(self, method): StubTwitterApi.calls.append(method) return self def __call__(self, *args, **kwargs): last_call = StubTwitterApi.calls.pop() StubTwitterApi.calls.append({ 
"name": last_call, "args": args, "kwargs": kwargs, }) class SanityTestOfStubTwitterApi(TestCase): def setUp(self): super(SanityTestOfStubTwitterApi, self).setUp() StubTwitterApi.number_of_calls = 0 def test_sanity_check(self): obj1 = StubTwitterApi() self.assertEqual(StubTwitterApi.number_of_calls, 1) obj2 = StubTwitterApi() self.assertEqual(StubTwitterApi.number_of_calls, 2) obj3 = StubTwitterApi() self.assertEqual(StubTwitterApi.number_of_calls, 3) def test_keeps_track_of_calls(self): obj = StubTwitterApi() obj.foobar() self.assertEqual(len(StubTwitterApi.calls), 1) def test_keeps_track_of_parameters_passed_in_to_methods(self): obj = StubTwitterApi() number = random(10, 100) obj.foobar(number) data = StubTwitterApi.calls.pop() self.assertEquals(data['args'], (number,)) def generate_full_update(number_of_seeders): account = generate_random_authorized_account() [generate_random_token(generate_random_seeder(account)) for i in range(number_of_seeders)] update = generate_random_update(account) return update class StubSettingsForTwitterApi(object): TWITTER = { "CONSUMER_KEY": "foobar", "CONSUMER_SECRET": "barfoo", } <|fim▁hole|> class TestOfTwitterPoster(TestCase): def setUp(self): super(TestOfTwitterPoster, self).setUp() StubTwitterApi.number_of_calls = 0 StubTwitterApi.calls = [] def test_encapsulates_post_in_template_string(self): settings = StubSettingsForTwitterApi() random_prefix = "random %d" % random(10, 100) settings.TWITTER["POST_TEMPLATE"] = "%s: %%s" % random_prefix u = generate_full_update(1) poster = TwitterPoster(api_class = StubTwitterApi, settings = settings) poster.post(u.seededupdate_set.all()[0]) for data in StubTwitterApi.calls: if data['name'] == 'PostUpdate': break (posted_status,) = data['args'] expected_status = "%s: .*" % random_prefix self.assertTrue( re.compile(expected_status).match(posted_status) is not None ) def test_instantiates_new_api_class_for_each_token(self): number_of_seeders = random(2, 10) u = generate_full_update(number_of_seeders) poster = TwitterPoster(api_class = StubTwitterApi) [seeded_update.send(poster) for seeded_update in u.seededupdate_set.all()] self.assertEquals(StubTwitterApi.number_of_calls, number_of_seeders) def assertSetSourceCalledWith(self, value): for data in StubTwitterApi.calls: if data["name"] == "SetSource": break self.assertEquals((value,), data["args"]) def test_sets_source_to_seeder_if_not_configured(self): u = generate_full_update(1) poster = TwitterPoster(api_class = StubTwitterApi) poster.post(u.seededupdate_set.all()[0]) self.assertSetSourceCalledWith("seeder") def test_sets_source_to_configured_value(self): settings = StubSettingsForTwitterApi() random_source = "random value: " + str(random(10, 100)) settings.TWITTER["SOURCE"] = random_source u = generate_full_update(1) poster = TwitterPoster(api_class = StubTwitterApi, settings = settings) poster.post(u.seededupdate_set.all()[0]) self.assertSetSourceCalledWith(random_source)<|fim▁end|>
<|file_name|>macro_example.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate nickel; extern crate regex; extern crate rustc_serialize; extern crate hyper; use std::io::Write; use nickel::status::StatusCode::{self, NotFound}; use nickel::{ Nickel, NickelError, Continue, Halt, Request, Response, MediaType, QueryString, JsonBody, StaticFilesHandler, MiddlewareResult, HttpRouter, Action }; use regex::Regex; use hyper::header::Location; #[derive(RustcDecodable, RustcEncodable)] struct Person { firstname: String, lastname: String, } //this is an example middleware function that just logs each request fn logger<'a>(request: &mut Request, response: Response<'a>) -> MiddlewareResult<'a> { println!("logging request: {:?}", request.origin.uri); Ok(Continue(response)) } //this is how to overwrite the default error handler to handle 404 cases with a custom view fn custom_404<'a>(err: &mut NickelError, _req: &mut Request) -> Action { if let Some(ref mut res) = err.stream { if res.status() == NotFound { let _ = res.write_all(b"<h1>Call the police!</h1>"); return Halt(()) } } Continue(()) } fn main() { let mut server = Nickel::new(); // middleware is optional and can be registered with `utilize` server.utilize(logger); // go to http://localhost:6767/thoughtram_logo_brain.png to see static file serving in action server.utilize(StaticFilesHandler::new("examples/assets/")); let hello_regex = Regex::new("/hello/(?P<name>[a-zA-Z]+)").unwrap(); // The return type for a route can be anything that implements `Responder` server.utilize(router!( // go to http://localhost:6767/user/4711 to see this route in action get "/user/:userid" => |request| { // returning a String format!("This is user: {}", request.param("userid").unwrap()) } // go to http://localhost:6767/no_alloc/4711 to see this route in action get "/no_alloc/:userid" => |request, response| { // returning a slice of T where T: Display &["This is user: ", request.param("userid").unwrap()][..] 
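The completion for the Django test-suite row above is empty: only whitespace separated the settings stub from the class that follows it, so nothing substantive was held out. A loader should decide explicitly whether to keep such rows rather than inherit them by accident.

def drop_blank_holes(pairs):
    """Filter out (prompt, completion) rows whose held-out span is pure whitespace."""
    return [(prompt, completion) for prompt, completion in pairs if completion.strip()]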
} // go to http://localhost:6767/bar to see this route in action get "/bar" => { // returning a http status code and a static string (200u16, "This is the /bar handler") } // go to http://localhost:6767/content-type to see this route in action get "/content-type" => |_, mut response| { response.set(MediaType::Json); "{'foo':'bar'}" } // go to http://localhost:6767/hello/moomah to see this route in action get hello_regex => |request| { format!("Hello {}", request.param("name").unwrap()) } // go to http://localhost:6767/redirect to see this route in action get "/redirect" => |_, mut response| { response.set(Location("http://nickel.rs".into())); StatusCode::PermanentRedirect } // go to http://localhost:6767/private to see this route in action get "/private" => { // returning a typed http status and a response body (StatusCode::Unauthorized, "This is a private place") } // go to http://localhost:6767/some/crazy/route to see this route in action get "/some/*/route" => { // returning a static string "This matches /some/crazy/route but not /some/super/crazy/route" } // go to http://localhost:6767/a/some/crazy/route to see this route in action get "/a/**/route" => { "This matches /a/crazy/route and also /a/super/crazy/route" } // try it with curl // curl 'http://localhost:6767/a/post/request' -H 'Content-Type: application/json;charset=UTF-8' --data-binary $'{ "firstname": "John","lastname": "Connor" }' post "/a/post/request" => |request| { let person = request.json_as::<Person>().unwrap(); format!("Hello {} {}", person.firstname, person.lastname) } // try calling http://localhost:6767/query?foo=bar get "/query" => |request| { let query = request.query(); let foo = query.get("foo").unwrap_or("This is only a default value"); let bar = query.get("bar").unwrap_or("This is only a default value");<|fim▁hole|> let text = format!("<p>Your foo values in the query string are: {:?}\ <p>Your bar values are: {:?}", foo, bar); text } )); // issue #20178 let custom_handler: fn(&mut NickelError, &mut Request) -> Action = custom_404; server.handle_error(custom_handler); println!("Running server!"); server.listen("127.0.0.1:6767"); }<|fim▁end|>
<|file_name|>io.rs<|end_file_name|><|fim▁begin|>//! SOLID-specific extensions to general I/O primitives #![deny(unsafe_op_in_unsafe_fn)] #![unstable(feature = "solid_ext", issue = "none")] use crate::net; use crate::sys; use crate::sys_common::{self, AsInner, FromInner, IntoInner}; /// Raw file descriptors. pub type RawFd = i32; /// A trait to extract the raw SOLID Sockets file descriptor from an underlying /// object. pub trait AsRawFd { /// Extracts the raw file descriptor. /// /// This method does **not** pass ownership of the raw file descriptor /// to the caller. The descriptor is only guaranteed to be valid while /// the original object has not yet been destroyed. fn as_raw_fd(&self) -> RawFd; } /// A trait to express the ability to construct an object from a raw file /// descriptor. pub trait FromRawFd { /// Constructs a new instance of `Self` from the given raw file /// descriptor. /// /// This function **consumes ownership** of the specified file /// descriptor. The returned object will take responsibility for closing /// it when the object goes out of scope. /// /// This function is also unsafe as the primitives currently returned /// have the contract that they are the sole owner of the file /// descriptor they are wrapping. Usage of this function could<|fim▁hole|> /// unsafety in code that relies on it being true. unsafe fn from_raw_fd(fd: RawFd) -> Self; } /// A trait to express the ability to consume an object and acquire ownership of /// its raw file descriptor. pub trait IntoRawFd { /// Consumes this object, returning the raw underlying file descriptor. /// /// This function **transfers ownership** of the underlying file descriptor /// to the caller. Callers are then the unique owners of the file descriptor /// and must close the descriptor once it's no longer needed. fn into_raw_fd(self) -> RawFd; } #[stable(feature = "raw_fd_reflexive_traits", since = "1.48.0")] impl AsRawFd for RawFd { #[inline] fn as_raw_fd(&self) -> RawFd { *self } } #[stable(feature = "raw_fd_reflexive_traits", since = "1.48.0")] impl IntoRawFd for RawFd { #[inline] fn into_raw_fd(self) -> RawFd { self } } #[stable(feature = "raw_fd_reflexive_traits", since = "1.48.0")] impl FromRawFd for RawFd { #[inline] unsafe fn from_raw_fd(fd: RawFd) -> RawFd { fd } } macro_rules! impl_as_raw_fd { ($($t:ident)*) => {$( #[stable(feature = "rust1", since = "1.0.0")] impl AsRawFd for net::$t { #[inline] fn as_raw_fd(&self) -> RawFd { *self.as_inner().socket().as_inner() } } )*}; } impl_as_raw_fd! { TcpStream TcpListener UdpSocket } macro_rules! impl_from_raw_fd { ($($t:ident)*) => {$( #[stable(feature = "from_raw_os", since = "1.1.0")] impl FromRawFd for net::$t { #[inline] unsafe fn from_raw_fd(fd: RawFd) -> net::$t { let socket = sys::net::Socket::from_inner(fd); net::$t::from_inner(sys_common::net::$t::from_inner(socket)) } } )*}; } impl_from_raw_fd! { TcpStream TcpListener UdpSocket } macro_rules! impl_into_raw_fd { ($($t:ident)*) => {$( #[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawFd for net::$t { #[inline] fn into_raw_fd(self) -> RawFd { self.into_inner().into_socket().into_inner() } } )*}; } impl_into_raw_fd! { TcpStream TcpListener UdpSocket }<|fim▁end|>
/// accidentally allow violating this contract which can cause memory
<|file_name|>patricia-tree-demo.cpp<|end_file_name|><|fim▁begin|>#include <iostream> #include <string> #include <map> #include <unistd.h> #include <utility> #include <sstream> #include "./patricia-tree.hpp" int main(void) { std::cout << "Add new command!\n"; PatriciaTree<Node<std::string, StringKeySpec>> pt; std::string command = "command"; std::string key; while(command != "exit") { getline(std::cin, key); pt.insertNode(key, command); std::cout << pt << "\n"; } return 0;<|fim▁hole|><|fim▁end|>
}
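Tying the sketches together, a loader can walk the whole dump, pair each prompt with the completion that follows it, and rebuild the files with the reassemble() helper given after the first row. The record layout (prompt text ending in <|fim▁end|>, then the completion, then the next <|file_name|> header) is read off the rows above, and the code assumes the sentinels never occur inside file contents.

def load_rows(raw: str):
    """Yield (prompt, completion) pairs from the concatenated dump."""
    for chunk in raw.split("<|file_name|>")[1:]:
        record = "<|file_name|>" + chunk
        prompt, marker, completion = record.partition("<|fim▁end|>")
        if not marker:  # malformed record with no end sentinel; skip it
            continue
        yield prompt + marker, completion.strip("\n")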