max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
14,668
<reponame>zealoussnow/chromium // Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/system/holding_space/holding_space_item_view.h" #include "ash/public/cpp/holding_space/holding_space_client.h" #include "ash/public/cpp/holding_space/holding_space_constants.h" #include "ash/public/cpp/holding_space/holding_space_controller.h" #include "ash/public/cpp/holding_space/holding_space_item.h" #include "ash/public/cpp/holding_space/holding_space_progress.h" #include "ash/public/cpp/shelf_config.h" #include "ash/resources/vector_icons/vector_icons.h" #include "ash/style/ash_color_provider.h" #include "ash/system/holding_space/holding_space_util.h" #include "ash/system/holding_space/holding_space_view_delegate.h" #include "base/bind.h" #include "ui/base/class_property.h" #include "ui/base/dragdrop/drag_drop_types.h" #include "ui/base/metadata/metadata_impl_macros.h" #include "ui/compositor/layer.h" #include "ui/gfx/canvas.h" #include "ui/gfx/color_palette.h" #include "ui/gfx/paint_vector_icon.h" #include "ui/views/accessibility/view_accessibility.h" #include "ui/views/background.h" #include "ui/views/controls/button/image_button.h" #include "ui/views/controls/focus_ring.h" #include "ui/views/controls/image_view.h" #include "ui/views/layout/fill_layout.h" #include "ui/views/painter.h" #include "ui/views/vector_icons.h" #include "ui/views/widget/widget.h" namespace ash { namespace { // A UI class property used to identify if a view is an instance of // `HoldingSpaceItemView`. Class name is not an adequate identifier as it may be // overridden by subclasses. DEFINE_UI_CLASS_PROPERTY_KEY(bool, kIsHoldingSpaceItemViewProperty, false) // Appearance. constexpr size_t kCheckmarkBackgroundSize = 18; // Helpers --------------------------------------------------------------------- // Schedules repaint of `layer`. 
void InvalidateLayer(ui::Layer* layer) { layer->SchedulePaint(gfx::Rect(layer->size())); } // CallbackPainter ------------------------------------------------------------- // A painter which delegates painting to a callback. class CallbackPainter : public views::Painter { public: using Callback = base::RepeatingCallback<void(gfx::Canvas*, gfx::Size)>; CallbackPainter(const CallbackPainter&) = delete; CallbackPainter& operator=(const CallbackPainter&) = delete; ~CallbackPainter() override = default; // Creates a painted layer which delegates painting to `callback`. static std::unique_ptr<ui::LayerOwner> CreatePaintedLayer(Callback callback) { auto owner = views::Painter::CreatePaintedLayer( base::WrapUnique(new CallbackPainter(callback))); owner->layer()->SetFillsBoundsOpaquely(false); return owner; } private: explicit CallbackPainter(Callback callback) : callback_(callback) {} // views::Painter: gfx::Size GetMinimumSize() const override { return gfx::Size(); } void Paint(gfx::Canvas* canvas, const gfx::Size& size) override { callback_.Run(canvas, size); } Callback callback_; }; // MinimumSizableView --------------------------------------------------------- // A view which respects a minimum size restriction. 
class MinimumSizableView : public views::View { public: explicit MinimumSizableView(const gfx::Size& min_size) : min_size_(min_size) {} MinimumSizableView(const MinimumSizableView&) = delete; MinimumSizableView& operator=(const MinimumSizableView&) = delete; ~MinimumSizableView() override = default; private: // views::View: gfx::Size CalculatePreferredSize() const override { gfx::Size preferred_size(views::View::CalculatePreferredSize()); preferred_size.SetToMax(min_size_); return preferred_size; } int GetHeightForWidth(int width) const override { return std::max(views::View::GetHeightForWidth(width), min_size_.height()); } const gfx::Size min_size_; }; } // namespace // HoldingSpaceItemView -------------------------------------------------------- HoldingSpaceItemView::HoldingSpaceItemView(HoldingSpaceViewDelegate* delegate, const HoldingSpaceItem* item) : delegate_(delegate), item_(item), item_id_(item->id()) { // Subscribe to be notified of `item_` deletion. Note that it is safe to use a // raw pointer here since `this` owns the callback. item_deletion_subscription_ = item_->AddDeletionCallback(base::BindRepeating( [](HoldingSpaceItemView* view) { view->item_ = nullptr; }, base::Unretained(this))); model_observer_.Observe(HoldingSpaceController::Get()->model()); SetProperty(kIsHoldingSpaceItemViewProperty, true); set_context_menu_controller(delegate_); set_drag_controller(delegate_); SetNotifyEnterExitOnChild(true); // Accessibility. GetViewAccessibility().OverrideName(item->GetAccessibleName()); GetViewAccessibility().OverrideDescription(base::EmptyString16()); GetViewAccessibility().OverrideRole(ax::mojom::Role::kListItem); // Layer. SetPaintToLayer(); layer()->SetFillsBoundsOpaquely(false); // Focus. 
SetFocusBehavior(FocusBehavior::ALWAYS); set_suppress_default_focus_handling(); focused_layer_owner_ = CallbackPainter::CreatePaintedLayer(base::BindRepeating( &HoldingSpaceItemView::OnPaintFocus, base::Unretained(this))); layer()->Add(focused_layer_owner_->layer()); // Selection. selected_layer_owner_ = CallbackPainter::CreatePaintedLayer(base::BindRepeating( &HoldingSpaceItemView::OnPaintSelect, base::Unretained(this))); layer()->Add(selected_layer_owner_->layer()); // This view's `selected_` state is represented differently depending on // `delegate_`'s selection UI. Register to be notified of changes. selection_ui_changed_subscription_ = delegate_->AddSelectionUiChangedCallback(base::BindRepeating( &HoldingSpaceItemView::OnSelectionUiChanged, base::Unretained(this))); delegate_->OnHoldingSpaceItemViewCreated(this); } HoldingSpaceItemView::~HoldingSpaceItemView() { if (delegate_) delegate_->OnHoldingSpaceItemViewDestroying(this); } // static HoldingSpaceItemView* HoldingSpaceItemView::Cast(views::View* view) { return const_cast<HoldingSpaceItemView*>( Cast(const_cast<const views::View*>(view))); } // static const HoldingSpaceItemView* HoldingSpaceItemView::Cast( const views::View* view) { DCHECK(HoldingSpaceItemView::IsInstance(view)); return static_cast<const HoldingSpaceItemView*>(view); } // static bool HoldingSpaceItemView::IsInstance(const views::View* view) { return view->GetProperty(kIsHoldingSpaceItemViewProperty); } void HoldingSpaceItemView::Reset() { delegate_ = nullptr; } bool HoldingSpaceItemView::HandleAccessibleAction( const ui::AXActionData& action_data) { return (delegate_ && delegate_->OnHoldingSpaceItemViewAccessibleAction( this, action_data)) || views::View::HandleAccessibleAction(action_data); } void HoldingSpaceItemView::OnBoundsChanged(const gfx::Rect& previous_bounds) { gfx::Rect bounds = GetLocalBounds(); // Selection ring. selected_layer_owner_->layer()->SetBounds(bounds); InvalidateLayer(selected_layer_owner_->layer()); // Focus ring. 
// NOTE: The focus ring is painted just outside the bounds for this view. bounds.Inset(gfx::Insets(kHoldingSpaceFocusInsets)); focused_layer_owner_->layer()->SetBounds(bounds); InvalidateLayer(focused_layer_owner_->layer()); } void HoldingSpaceItemView::OnFocus() { InvalidateLayer(focused_layer_owner_->layer()); } void HoldingSpaceItemView::OnBlur() { InvalidateLayer(focused_layer_owner_->layer()); } void HoldingSpaceItemView::OnGestureEvent(ui::GestureEvent* event) { if (delegate_ && delegate_->OnHoldingSpaceItemViewGestureEvent(this, *event)) event->SetHandled(); } bool HoldingSpaceItemView::OnKeyPressed(const ui::KeyEvent& event) { return delegate_ && delegate_->OnHoldingSpaceItemViewKeyPressed(this, event); } void HoldingSpaceItemView::OnMouseEvent(ui::MouseEvent* event) { switch (event->type()) { case ui::ET_MOUSE_ENTERED: case ui::ET_MOUSE_EXITED: UpdatePrimaryAction(); break; default: break; } views::View::OnMouseEvent(event); } bool HoldingSpaceItemView::OnMousePressed(const ui::MouseEvent& event) { return delegate_ && delegate_->OnHoldingSpaceItemViewMousePressed(this, event); } void HoldingSpaceItemView::OnMouseReleased(const ui::MouseEvent& event) { if (delegate_) delegate_->OnHoldingSpaceItemViewMouseReleased(this, event); } void HoldingSpaceItemView::OnThemeChanged() { views::View::OnThemeChanged(); AshColorProvider* const ash_color_provider = AshColorProvider::Get(); // Background. SetBackground(views::CreateRoundedRectBackground( ash_color_provider->GetControlsLayerColor( AshColorProvider::ControlsLayerType::kControlBackgroundColorInactive), kHoldingSpaceCornerRadius)); // Checkmark. checkmark_->SetBackground(holding_space_util::CreateCircleBackground( ash_color_provider->GetControlsLayerColor( AshColorProvider::ControlsLayerType::kFocusRingColor), kCheckmarkBackgroundSize)); checkmark_->SetImage(gfx::CreateVectorIcon( kCheckIcon, kHoldingSpaceIconSize, ash_color_provider->IsDarkModeEnabled() ? 
gfx::kGoogleGrey900 : SK_ColorWHITE)); // Focused/selected layers. InvalidateLayer(focused_layer_owner_->layer()); InvalidateLayer(selected_layer_owner_->layer()); if (!primary_action_container_) return; // Cancel. const SkColor icon_color = AshColorProvider::Get()->GetContentLayerColor( AshColorProvider::ContentLayerType::kButtonIconColor); primary_action_cancel_->SetImage( views::Button::STATE_NORMAL, gfx::CreateVectorIcon(kCancelIcon, kHoldingSpaceIconSize, icon_color)); // Pin. const gfx::ImageSkia unpinned_icon = gfx::CreateVectorIcon( views::kUnpinIcon, kHoldingSpaceIconSize, icon_color); const gfx::ImageSkia pinned_icon = gfx::CreateVectorIcon(views::kPinIcon, kHoldingSpaceIconSize, icon_color); primary_action_pin_->SetImage(views::Button::STATE_NORMAL, unpinned_icon); primary_action_pin_->SetToggledImage(views::Button::STATE_NORMAL, &pinned_icon); } void HoldingSpaceItemView::OnHoldingSpaceItemUpdated( const HoldingSpaceItem* item, uint32_t updated_fields) { if (item_ != item) return; // Accessibility. if (updated_fields & UpdatedField::kAccessibleName) { GetViewAccessibility().OverrideName(item_->GetAccessibleName()); NotifyAccessibilityEvent(ax::mojom::Event::kTextChanged, true); } // Primary action. 
UpdatePrimaryAction(); } void HoldingSpaceItemView::StartDrag(const ui::LocatedEvent& event, ui::mojom::DragEventSource source) { int drag_operations = GetDragOperations(event.location()); if (drag_operations == ui::DragDropTypes::DRAG_NONE) return; views::Widget* widget = GetWidget(); DCHECK(widget); if (widget->dragged_view()) return; auto data = std::make_unique<ui::OSExchangeData>(); WriteDragData(event.location(), data.get()); gfx::Point widget_location(event.location()); views::View::ConvertPointToWidget(this, &widget_location); widget->RunShellDrag(this, std::move(data), widget_location, drag_operations, source); } void HoldingSpaceItemView::SetSelected(bool selected) { if (selected_ == selected) return; selected_ = selected; InvalidateLayer(selected_layer_owner_->layer()); if (delegate_) delegate_->OnHoldingSpaceItemViewSelectedChanged(this); OnSelectionUiChanged(); } views::Builder<views::ImageView> HoldingSpaceItemView::CreateCheckmarkBuilder() { DCHECK(!checkmark_); auto checkmark = views::Builder<views::ImageView>(); checkmark.CopyAddressTo(&checkmark_) .SetID(kHoldingSpaceItemCheckmarkId) .SetVisible(selected()); return checkmark; } views::Builder<views::View> HoldingSpaceItemView::CreatePrimaryActionBuilder( const gfx::Size& min_size) { DCHECK(!primary_action_container_); DCHECK(!primary_action_cancel_); DCHECK(!primary_action_pin_); using HorizontalAlignment = views::ImageButton::HorizontalAlignment; using VerticalAlignment = views::ImageButton::VerticalAlignment; gfx::Size preferred_size(kHoldingSpaceIconSize, kHoldingSpaceIconSize); preferred_size.SetToMax(min_size); auto primary_action = views::Builder<views::View>(); primary_action.CopyAddressTo(&primary_action_container_) .SetID(kHoldingSpaceItemPrimaryActionContainerId) .SetUseDefaultFillLayout(true) .SetVisible(false) .AddChild( views::Builder<views::ImageButton>() .CopyAddressTo(&primary_action_cancel_) .SetID(kHoldingSpaceItemCancelButtonId) .SetCallback(base::BindRepeating( 
&HoldingSpaceItemView::OnPrimaryActionPressed, base::Unretained(this))) .SetFocusBehavior(views::View::FocusBehavior::NEVER) .SetImageHorizontalAlignment(HorizontalAlignment::ALIGN_CENTER) .SetImageVerticalAlignment(VerticalAlignment::ALIGN_MIDDLE) .SetPreferredSize(preferred_size) .SetVisible(false)) .AddChild( views::Builder<views::ToggleImageButton>() .CopyAddressTo(&primary_action_pin_) .SetID(kHoldingSpaceItemPinButtonId) .SetCallback(base::BindRepeating( &HoldingSpaceItemView::OnPrimaryActionPressed, base::Unretained(this))) .SetFocusBehavior(views::View::FocusBehavior::NEVER) .SetImageHorizontalAlignment(HorizontalAlignment::ALIGN_CENTER) .SetImageVerticalAlignment(VerticalAlignment::ALIGN_MIDDLE) .SetPreferredSize(preferred_size) .SetVisible(false)); return primary_action; } void HoldingSpaceItemView::OnSelectionUiChanged() { const bool multiselect = delegate_ && delegate_->selection_ui() == HoldingSpaceViewDelegate::SelectionUi::kMultiSelect; checkmark_->SetVisible(selected() && multiselect); } void HoldingSpaceItemView::OnPaintFocus(gfx::Canvas* canvas, gfx::Size size) { if (!HasFocus()) return; cc::PaintFlags flags; flags.setAntiAlias(true); flags.setColor(AshColorProvider::Get()->GetControlsLayerColor( AshColorProvider::ControlsLayerType::kFocusRingColor)); flags.setStrokeWidth(views::FocusRing::kDefaultHaloThickness); flags.setStyle(cc::PaintFlags::kStroke_Style); gfx::Rect bounds = gfx::Rect(size); bounds.Inset(gfx::Insets(flags.getStrokeWidth() / 2)); canvas->DrawRoundRect(bounds, kHoldingSpaceFocusCornerRadius, flags); } void HoldingSpaceItemView::OnPaintSelect(gfx::Canvas* canvas, gfx::Size size) { if (!selected_) return; const SkColor color = SkColorSetA(AshColorProvider::Get()->GetControlsLayerColor( AshColorProvider::ControlsLayerType::kFocusRingColor), kHoldingSpaceSelectedOverlayOpacity * 0xFF); cc::PaintFlags flags; flags.setAntiAlias(true); flags.setColor(color); canvas->DrawRoundRect(gfx::Rect(size), kHoldingSpaceCornerRadius, flags); } 
void HoldingSpaceItemView::OnPrimaryActionPressed() { // If the associated `item()` has been deleted then `this` is in the process // of being destroyed and no action needs to be taken. if (!item()) return; DCHECK_NE(primary_action_cancel_->GetVisible(), primary_action_pin_->GetVisible()); if (delegate()) delegate()->OnHoldingSpaceItemViewPrimaryActionPressed(this); // Cancel. if (primary_action_cancel_->GetVisible()) { HoldingSpaceController::Get()->client()->CancelItems({item()}); return; } // Pin. const bool is_item_pinned = HoldingSpaceController::Get()->model()->ContainsItem( HoldingSpaceItem::Type::kPinnedFile, item()->file_path()); // Unpinning `item()` may result in the destruction of this view. auto weak_ptr = weak_factory_.GetWeakPtr(); if (is_item_pinned) HoldingSpaceController::Get()->client()->UnpinItems({item()}); else HoldingSpaceController::Get()->client()->PinItems({item()}); if (weak_ptr) UpdatePrimaryAction(); } void HoldingSpaceItemView::UpdatePrimaryAction() { // If the associated `item()` has been deleted then `this` is in the process // of being destroyed and no action needs to be taken. if (!item()) return; if (!IsMouseHovered()) { primary_action_container_->SetVisible(false); OnPrimaryActionVisibilityChanged(false); return; } // Cancel. // NOTE: Only download type items currently support cancellation. const bool is_item_in_progress = !item()->progress().IsComplete(); primary_action_cancel_->SetVisible( is_item_in_progress && HoldingSpaceItem::IsDownload(item()->type())); // Pin. const bool is_item_pinned = HoldingSpaceController::Get()->model()->ContainsItem( HoldingSpaceItem::Type::kPinnedFile, item()->file_path()); primary_action_pin_->SetToggled(!is_item_pinned); primary_action_pin_->SetVisible(!is_item_in_progress); // Container. 
primary_action_container_->SetVisible(primary_action_cancel_->GetVisible() || primary_action_pin_->GetVisible()); OnPrimaryActionVisibilityChanged(primary_action_container_->GetVisible()); } BEGIN_METADATA(HoldingSpaceItemView, views::View) END_METADATA } // namespace ash
6,278
1,338
<reponame>Kirishikesan/haiku<gh_stars>1000+ /* * Copyright 2012, Haiku, Inc. All Rights Reserved. * Distributed under the terms of the MIT License. */ #ifndef _MAIL_SETTINGS_VIEW_H #define _MAIL_SETTINGS_VIEW_H #include <MailSettings.h> #include <View.h> class BMailSettingsView : public BView { public: BMailSettingsView(const char* name); virtual ~BMailSettingsView(); virtual status_t SaveInto( BMailAddOnSettings& settings) const = 0; }; #endif // _MAIL_SETTINGS_VIEW_H
202
1,694
<reponame>CrackerCat/iWeChat // // Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>. // #import <MMCommon/MMObject.h> #import "PBMessageObserverDelegate-Protocol.h" @class NSMutableDictionary; @protocol GameSettingLogicControllerDelegate; @interface GameSettingLogicController : MMObject <PBMessageObserverDelegate> { id <GameSettingLogicControllerDelegate> _delegate; NSMutableDictionary *_dictSetting; } - (void).cxx_destruct; - (void)handleSetGameBlockStateResponse:(id)arg1 Event:(unsigned int)arg2; - (void)handleGetGameBlockStateResponse:(id)arg1 Event:(unsigned int)arg2; - (void)handleGameBlockStateResponse:(id)arg1 Event:(unsigned int)arg2; - (void)MessageReturn:(id)arg1 Event:(unsigned int)arg2; - (void)setGameBlockState:(id)arg1 onOrOff:(_Bool)arg2; - (_Bool)getGameBlockState:(id)arg1; - (_Bool)getSetting:(id)arg1 bit:(unsigned long long)arg2; - (void)setSetting:(id)arg1 bit:(unsigned long long)arg2 value:(_Bool)arg3; - (void)saveSettingDict; - (void)loadSettingDict; - (void)dealloc; - (id)initWithDelegate:(id)arg1; @end
431
394
<gh_stars>100-1000 package net.earthcomputer.multiconnect.protocols.v1_15_2.mixin; import net.earthcomputer.multiconnect.impl.MixinHelper; import net.minecraft.entity.data.TrackedData; import net.minecraft.entity.passive.WolfEntity; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.gen.Accessor; @Mixin(WolfEntity.class) public interface WolfEntityAccessor { @Accessor("ANGER_TIME") static TrackedData<Integer> getAngerTime() { return MixinHelper.fakeInstance(); } }
186
491
#!/usr/bin/python3 import tensorflow as tf print(tf.__version__) import_dir = "./savedmodel" export_dir = import_dir+"_signaturedefs" def get_graph_def_from_saved_model(saved_model_dir): with tf.compat.v1.Session() as session: meta_graph_def = tf.compat.v1.saved_model.loader.load( session, tags=['serve'], export_dir=saved_model_dir ) return meta_graph_def.graph_def graph_def = get_graph_def_from_saved_model(import_dir) input_nodes = ['sub_2'] output_nodes = ['float_segments' ] #, 'float_short_offsets'] with tf.compat.v1.Session(graph=tf.Graph()) as session: tf.compat.v1.import_graph_def(graph_def, name='') inputs = {input_node: session.graph.get_tensor_by_name(f'{input_node}:0') for input_node in input_nodes} outputs = {output_node: session.graph.get_tensor_by_name(f'{output_node}:0') for output_node in output_nodes} tf.compat.v1.saved_model.simple_save( session, export_dir, inputs=inputs, outputs=outputs ) model = tf.saved_model.load(export_dir) concrete_func = model.signatures[tf.saved_model.DEFAULT_SERVING_SIGNATURE_DEF_KEY] concrete_func.inputs[0].set_shape([1, 257, 257, 3]) converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func]) tflite_model = converter.convert() with tf.io.gfile.GFile('bodypix.tflite', 'wb') as f: f.write(tflite_model)
620
1,742
<gh_stars>1000+ from .alphabet import Alphabet, build_alphabet from .morphism import WordMorphism from .paths import WordPaths from .word import Word from .word_options import WordOptions from .word_generators import words from .words import Words, FiniteWords, InfiniteWords from .lyndon_word import LyndonWord, LyndonWords, StandardBracketedLyndonWords
99
434
<gh_stars>100-1000 #!/usr/bin/env python3 from distutils.core import setup setup(name = "trimage", version = "1.0.6", description = "Trimage image compressor - A cross-platform tool for optimizing PNG and JPG files", author = "<NAME>, <NAME>", author_email = "<EMAIL>", url = "http://trimage.org", license = "MIT license", packages = ["trimage", "trimage.ThreadPool"], package_data = {"trimage" : ["pixmaps/*.*"] }, data_files=[('share/icons/hicolor/scalable/apps', ['desktop/trimage.svg']), ('share/applications', ['desktop/trimage.desktop']), ('share/man/man1', ['doc/trimage.1'])], scripts = ["bin/trimage"], long_description = """Trimage is a cross-platform GUI and command-line interface to optimize image files via advpng, jpegoptim, optipng and pngcrush, depending on the filetype (currently, PNG and JPG files are supported). It was inspired by imageoptim. All image files are losslessy compressed on the highest available compression levels. Trimage gives you various input functions to fit your own workflow: A regular file dialog, dragging and dropping and various command line options.""", requires = ["PyQt5"] )
405
5,169
{ "name": "YXTShareLib", "version": "2.0beta", "summary": "分享工具", "description": "云学堂分享", "homepage": "http://www.yxt.com/", "license": "MIT", "authors": { "hanjun": "<EMAIL>" }, "source": { "http": "https://appdown.yunxuetang.cn/t/daxue/sdk/YXT-Share-SDK-iOS-2.0betalib.zip" }, "platforms": { "ios": "8.0" }, "vendored_frameworks": "YXT-Share-SDK-iOS-2.0betalib/YXTShareSDK.framework", "dependencies": { "UMengUShare/Social/WeChat": [ "6.4.8" ], "UMengUShare/Social/QQ": [ "6.4.8" ], "UMengUShare/Social/DingDing": [ "6.4.8" ], "UMengUShare/Social/Sina": [ "6.4.8" ] } }
370
543
<gh_stars>100-1000 /* * Copyright (c) 2005, 2017, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.sun.imageio.plugins.tiff; import java.awt.Point; import java.awt.Transparency; import java.awt.color.ColorSpace; import java.awt.image.BufferedImage; import java.awt.image.ColorModel; import java.awt.image.ComponentColorModel; import java.awt.image.DataBuffer; import java.awt.image.DataBufferByte; import java.awt.image.PixelInterleavedSampleModel; import java.awt.image.Raster; import java.awt.image.SampleModel; import java.awt.image.WritableRaster; import java.io.IOException; import java.io.ByteArrayOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Iterator; import javax.imageio.IIOException; import javax.imageio.IIOImage; import javax.imageio.ImageIO; import javax.imageio.ImageWriteParam; import javax.imageio.ImageWriter; import javax.imageio.metadata.IIOInvalidTreeException; import javax.imageio.metadata.IIOMetadata; import javax.imageio.metadata.IIOMetadataNode; import javax.imageio.spi.ImageWriterSpi; import javax.imageio.plugins.jpeg.JPEGImageWriteParam; import javax.imageio.stream.ImageOutputStream; import javax.imageio.stream.MemoryCacheImageOutputStream; import org.w3c.dom.Node; /** * Base class for all possible forms of JPEG compression in TIFF. */ public abstract class TIFFBaseJPEGCompressor extends TIFFCompressor { // Stream metadata format. protected static final String STREAM_METADATA_NAME = "javax_imageio_jpeg_stream_1.0"; // Image metadata format. protected static final String IMAGE_METADATA_NAME = "javax_imageio_jpeg_image_1.0"; // ImageWriteParam passed in. private ImageWriteParam param = null; /** * ImageWriteParam for JPEG writer. * May be initialized by {@link #initJPEGWriter}. */ protected JPEGImageWriteParam JPEGParam = null; /** * The JPEG writer. * May be initialized by {@link #initJPEGWriter}. */ protected ImageWriter JPEGWriter = null; /** * Whether to write abbreviated JPEG streams (default == false). 
* A subclass which sets this to {@code true} should also * initialized {@link #JPEGStreamMetadata}. */ protected boolean writeAbbreviatedStream = false; /** * Stream metadata equivalent to a tables-only stream such as in * the {@code JPEGTables}. Default value is {@code null}. * This should be set by any subclass which sets * {@link #writeAbbreviatedStream} to {@code true}. */ protected IIOMetadata JPEGStreamMetadata = null; // A pruned image metadata object containing only essential nodes. private IIOMetadata JPEGImageMetadata = null; // Array-based output stream. private IIOByteArrayOutputStream baos; /** * Removes nonessential nodes from a JPEG native image metadata tree. * All nodes derived from JPEG marker segments other than DHT, DQT, * SOF, SOS segments are removed unless {@code pruneTables} is * {@code true} in which case the nodes derived from the DHT and * DQT marker segments are also removed. * * @param tree A <tt>javax_imageio_jpeg_image_1.0</tt> tree. * @param pruneTables Whether to prune Huffman and quantization tables. * @throws NullPointerException if {@code tree} is * {@code null}. * @throws IllegalArgumentException if {@code tree} is not the root * of a JPEG native image metadata tree. */ private static void pruneNodes(Node tree, boolean pruneTables) { if(tree == null) { throw new NullPointerException("tree == null!"); } if(!tree.getNodeName().equals(IMAGE_METADATA_NAME)) { throw new IllegalArgumentException ("root node name is not "+IMAGE_METADATA_NAME+"!"); } // Create list of required nodes. List<String> wantedNodes = new ArrayList<String>(); wantedNodes.addAll(Arrays.asList(new String[] { "JPEGvariety", "markerSequence", "sof", "componentSpec", "sos", "scanComponentSpec" })); // Add Huffman and quantization table nodes if not pruning tables. 
if(!pruneTables) { wantedNodes.add("dht"); wantedNodes.add("dhtable"); wantedNodes.add("dqt"); wantedNodes.add("dqtable"); } IIOMetadataNode iioTree = (IIOMetadataNode)tree; List<Node> nodes = getAllNodes(iioTree, null); int numNodes = nodes.size(); for(int i = 0; i < numNodes; i++) { Node node = nodes.get(i); if(!wantedNodes.contains(node.getNodeName())) { node.getParentNode().removeChild(node); } } } private static List<Node> getAllNodes(IIOMetadataNode root, List<Node> nodes) { if(nodes == null) nodes = new ArrayList<Node>(); if(root.hasChildNodes()) { Node sibling = root.getFirstChild(); while(sibling != null) { nodes.add(sibling); nodes = getAllNodes((IIOMetadataNode)sibling, nodes); sibling = sibling.getNextSibling(); } } return nodes; } public TIFFBaseJPEGCompressor(String compressionType, int compressionTagValue, boolean isCompressionLossless, ImageWriteParam param) { super(compressionType, compressionTagValue, isCompressionLossless); this.param = param; } /** * A {@code ByteArrayOutputStream} which allows writing to an * {@code ImageOutputStream}. */ private static class IIOByteArrayOutputStream extends ByteArrayOutputStream { IIOByteArrayOutputStream() { super(); } IIOByteArrayOutputStream(int size) { super(size); } public synchronized void writeTo(ImageOutputStream ios) throws IOException { ios.write(buf, 0, count); } } /** * Initializes the JPEGWriter and JPEGParam instance variables. * This method must be called before encode() is invoked. * * @param supportsStreamMetadata Whether the JPEG writer must * support JPEG native stream metadata, i.e., be capable of writing * abbreviated streams. * @param supportsImageMetadata Whether the JPEG writer must * support JPEG native image metadata. */ protected void initJPEGWriter(boolean supportsStreamMetadata, boolean supportsImageMetadata) { // Reset the writer to null if it does not match preferences. 
if(this.JPEGWriter != null && (supportsStreamMetadata || supportsImageMetadata)) { ImageWriterSpi spi = this.JPEGWriter.getOriginatingProvider(); if(supportsStreamMetadata) { String smName = spi.getNativeStreamMetadataFormatName(); if(smName == null || !smName.equals(STREAM_METADATA_NAME)) { this.JPEGWriter = null; } } if(this.JPEGWriter != null && supportsImageMetadata) { String imName = spi.getNativeImageMetadataFormatName(); if(imName == null || !imName.equals(IMAGE_METADATA_NAME)) { this.JPEGWriter = null; } } } // Set the writer. if(this.JPEGWriter == null) { Iterator<ImageWriter> iter = ImageIO.getImageWritersByFormatName("jpeg"); while(iter.hasNext()) { // Get a writer. ImageWriter writer = iter.next(); // Verify its metadata support level. if(supportsStreamMetadata || supportsImageMetadata) { ImageWriterSpi spi = writer.getOriginatingProvider(); if(supportsStreamMetadata) { String smName = spi.getNativeStreamMetadataFormatName(); if(smName == null || !smName.equals(STREAM_METADATA_NAME)) { // Try the next one. continue; } } if(supportsImageMetadata) { String imName = spi.getNativeImageMetadataFormatName(); if(imName == null || !imName.equals(IMAGE_METADATA_NAME)) { // Try the next one. continue; } } } // Set the writer. this.JPEGWriter = writer; break; } if(this.JPEGWriter == null) { throw new NullPointerException ("No appropriate JPEG writers found!"); } } // Initialize the ImageWriteParam. if(this.JPEGParam == null) { if(param != null && param instanceof JPEGImageWriteParam) { JPEGParam = (JPEGImageWriteParam)param; } else { JPEGParam = new JPEGImageWriteParam(writer != null ? writer.getLocale() : null); if (param != null && param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) { JPEGParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT); JPEGParam.setCompressionQuality(param.getCompressionQuality()); } } } } /** * Retrieves image metadata with non-core nodes removed. 
*/ private IIOMetadata getImageMetadata(boolean pruneTables) throws IIOException { if(JPEGImageMetadata == null && IMAGE_METADATA_NAME.equals(JPEGWriter.getOriginatingProvider().getNativeImageMetadataFormatName())) { TIFFImageWriter tiffWriter = (TIFFImageWriter)this.writer; // Get default image metadata. JPEGImageMetadata = JPEGWriter.getDefaultImageMetadata(tiffWriter.getImageType(), JPEGParam); // Get the DOM tree. Node tree = JPEGImageMetadata.getAsTree(IMAGE_METADATA_NAME); // Remove unwanted marker segments. try { pruneNodes(tree, pruneTables); } catch(IllegalArgumentException e) { throw new IIOException("Error pruning unwanted nodes", e); } // Set the DOM back into the metadata. try { JPEGImageMetadata.setFromTree(IMAGE_METADATA_NAME, tree); } catch(IIOInvalidTreeException e) { throw new IIOException ("Cannot set pruned image metadata!", e); } } return JPEGImageMetadata; } public final int encode(byte[] b, int off, int width, int height, int[] bitsPerSample, int scanlineStride) throws IOException { if (this.JPEGWriter == null) { throw new IIOException("JPEG writer has not been initialized!"); } if (!((bitsPerSample.length == 3 && bitsPerSample[0] == 8 && bitsPerSample[1] == 8 && bitsPerSample[2] == 8) || (bitsPerSample.length == 1 && bitsPerSample[0] == 8))) { throw new IIOException("Can only JPEG compress 8- and 24-bit images!"); } // Set the stream. // The stream has to be wrapped as the Java Image I/O JPEG // ImageWriter flushes the stream at the end of each write() // and this causes problems for the TIFF writer. if (baos == null) { baos = new IIOByteArrayOutputStream(); } else { baos.reset(); } ImageOutputStream ios = new MemoryCacheImageOutputStream(baos); JPEGWriter.setOutput(ios); // Create a DataBuffer. DataBufferByte dbb; if (off == 0) { dbb = new DataBufferByte(b, b.length); } else { // // Workaround for bug in core Java Image I/O JPEG // ImageWriter which cannot handle non-zero offsets. 
// int bytesPerSegment = scanlineStride * height; byte[] btmp = new byte[bytesPerSegment]; System.arraycopy(b, off, btmp, 0, bytesPerSegment); dbb = new DataBufferByte(btmp, bytesPerSegment); off = 0; } // Set up the ColorSpace. int[] offsets; ColorSpace cs; if (bitsPerSample.length == 3) { offsets = new int[]{off, off + 1, off + 2}; cs = ColorSpace.getInstance(ColorSpace.CS_sRGB); } else { offsets = new int[]{off}; cs = ColorSpace.getInstance(ColorSpace.CS_GRAY); } // Create the ColorModel. ColorModel cm = new ComponentColorModel(cs, false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE); // Create the SampleModel. SampleModel sm = new PixelInterleavedSampleModel(DataBuffer.TYPE_BYTE, width, height, bitsPerSample.length, scanlineStride, offsets); // Create the WritableRaster. WritableRaster wras = Raster.createWritableRaster(sm, dbb, new Point(0, 0)); // Create the BufferedImage. BufferedImage bi = new BufferedImage(cm, wras, false, null); // Get the pruned JPEG image metadata (may be null). IIOMetadata imageMetadata = getImageMetadata(writeAbbreviatedStream); // Compress the image into the output stream. int compDataLength; if (writeAbbreviatedStream) { // Write abbreviated JPEG stream // First write the tables-only data. JPEGWriter.prepareWriteSequence(JPEGStreamMetadata); ios.flush(); // Rewind to the beginning of the byte array. baos.reset(); // Write the abbreviated image data. IIOImage image = new IIOImage(bi, null, imageMetadata); JPEGWriter.writeToSequence(image, JPEGParam); JPEGWriter.endWriteSequence(); } else { // Write complete JPEG stream JPEGWriter.write(null, new IIOImage(bi, null, imageMetadata), JPEGParam); } compDataLength = baos.size(); baos.writeTo(stream); baos.reset(); return compDataLength; } @SuppressWarnings("deprecation") protected void finalize() throws Throwable { super.finalize(); if(JPEGWriter != null) { JPEGWriter.dispose(); } } }
7,292
344
<gh_stars>100-1000
/*
 *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/audio_processing/agc2/rnn_vad/rnn.h"

#include "rtc_base/checks.h"
#include "third_party/rnnoise/src/rnn_vad_weights.h"

namespace webrtc {
namespace rnn_vad {
namespace {

// Pull in the pre-trained rnnoise weights/biases and statically verify that
// every layer fits inside the fixed-capacity buffers declared in rnn.h.
using ::rnnoise::kInputLayerInputSize;
// The feature extractor's output vector must match the first layer's input.
static_assert(kFeatureVectorSize == kInputLayerInputSize, "");
using ::rnnoise::kInputDenseBias;
using ::rnnoise::kInputDenseWeights;
using ::rnnoise::kInputLayerOutputSize;
static_assert(kInputLayerOutputSize <= kFullyConnectedLayerMaxUnits, "");

using ::rnnoise::kHiddenGruBias;
using ::rnnoise::kHiddenGruRecurrentWeights;
using ::rnnoise::kHiddenGruWeights;
using ::rnnoise::kHiddenLayerOutputSize;
static_assert(kHiddenLayerOutputSize <= kGruLayerMaxUnits, "");

using ::rnnoise::kOutputDenseBias;
using ::rnnoise::kOutputDenseWeights;
using ::rnnoise::kOutputLayerOutputSize;
static_assert(kOutputLayerOutputSize <= kFullyConnectedLayerMaxUnits, "");

}  // namespace

// Builds the three-stage pipeline FC1 -> GRU1 -> FC2. Only the first two
// layers take `cpu_features`; see the inline note on the output layer.
RnnVad::RnnVad(const AvailableCpuFeatures& cpu_features)
    : input_(kInputLayerInputSize,
             kInputLayerOutputSize,
             kInputDenseBias,
             kInputDenseWeights,
             ActivationFunction::kTansigApproximated,
             cpu_features,
             /*layer_name=*/"FC1"),
      hidden_(kInputLayerOutputSize,
              kHiddenLayerOutputSize,
              kHiddenGruBias,
              kHiddenGruWeights,
              kHiddenGruRecurrentWeights,
              cpu_features,
              /*layer_name=*/"GRU1"),
      output_(kHiddenLayerOutputSize,
              kOutputLayerOutputSize,
              kOutputDenseBias,
              kOutputDenseWeights,
              ActivationFunction::kSigmoidApproximated,
              // The output layer is just 24x1. The unoptimized code is faster.
              NoAvailableCpuFeatures(),
              /*layer_name=*/"FC2") {
  // Input-output chaining size checks.
  RTC_DCHECK_EQ(input_.size(), hidden_.input_size())
      << "The input and the hidden layers sizes do not match.";
  RTC_DCHECK_EQ(hidden_.size(), output_.input_size())
      << "The hidden and the output layers sizes do not match.";
}

RnnVad::~RnnVad() = default;

// Clears only the recurrent (GRU) state; the dense layers are stateless.
void RnnVad::Reset() {
  hidden_.Reset();
}

// Runs one inference step. On silence the recurrent state is reset and 0 is
// returned without evaluating the network, so a silent gap also forgets any
// speech context accumulated before it.
float RnnVad::ComputeVadProbability(
    rtc::ArrayView<const float, kFeatureVectorSize> feature_vector,
    bool is_silence) {
  if (is_silence) {
    Reset();
    return 0.f;
  }
  input_.ComputeOutput(feature_vector);
  hidden_.ComputeOutput(input_);
  output_.ComputeOutput(hidden_);
  RTC_DCHECK_EQ(output_.size(), 1);
  // Single sigmoid unit: the value is already a probability in [0, 1].
  return output_.data()[0];
}

}  // namespace rnn_vad
}  // namespace webrtc
1,218
925
<filename>Source/Drivers/OniFile/Formats/XnCodec.cpp /***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ //--------------------------------------------------------------------------- // Includes //--------------------------------------------------------------------------- #include "XnCodec.h" #include "XnCodecIDs.h" XnCompressionFormats XnCodec::GetCompressionFormatFromCodecID(XnCodecID codecID) { switch (codecID) { case XN_CODEC_UNCOMPRESSED: return XN_COMPRESSION_NONE; case XN_CODEC_16Z: return XN_COMPRESSION_16Z; case XN_CODEC_16Z_EMB_TABLES: return XN_COMPRESSION_16Z_EMB_TABLE; case XN_CODEC_8Z: return XN_COMPRESSION_COLOR_8Z; case XN_CODEC_JPEG: return XN_COMPRESSION_JPEG; default: return (XnCompressionFormats)-1; } } XnCodecID XnCodec::GetCodecIDFromCompressionFormat(XnCompressionFormats format) { switch (format) { case XN_COMPRESSION_16Z: return XN_CODEC_16Z; case XN_COMPRESSION_16Z_EMB_TABLE: return XN_CODEC_16Z_EMB_TABLES; case XN_COMPRESSION_JPEG: return XN_CODEC_JPEG; case XN_COMPRESSION_NONE: return XN_CODEC_UNCOMPRESSED; case XN_COMPRESSION_COLOR_8Z: return XN_CODEC_8Z; default: return XN_CODEC_NULL; } }
1,439
307
<gh_stars>100-1000
package test;

import com.nlf.calendar.Fu;
import com.nlf.calendar.Lunar;
import com.nlf.calendar.Solar;
import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for the "three Fu" (sanfu, the hottest periods of the lunar summer:
 * 初伏/中伏/末伏) calculation.
 *
 * @author 6tail
 */
public class FuTest {

  /**
   * Asserts that the given solar date falls inside a Fu period with the
   * expected short name and full (day-numbered) name. The yyyy-MM-dd string
   * is passed as the assertion message so a failure identifies the date.
   */
  private static void assertFu(Solar solar, String expectedName, String expectedFullName) {
    Lunar lunar = solar.getLunar();
    Fu fu = lunar.getFu();
    Assert.assertEquals(solar.toYmd(), expectedName, fu.toString());
    Assert.assertEquals(solar.toYmd(), expectedFullName, fu.toFullString());
  }

  /** Asserts that the given solar date is outside all Fu periods. */
  private static void assertNoFu(Solar solar) {
    Assert.assertNull(solar.toYmd(), solar.getLunar().getFu());
  }

  // 2011: first day / last day of each of the three periods.
  @Test
  public void test1() {
    assertFu(new Solar(2011, 7, 14), "初伏", "初伏第1天");
  }

  @Test
  public void test2() {
    assertFu(new Solar(2011, 7, 23), "初伏", "初伏第10天");
  }

  @Test
  public void test3() {
    assertFu(new Solar(2011, 7, 24), "中伏", "中伏第1天");
  }

  @Test
  public void test4() {
    assertFu(new Solar(2011, 8, 12), "中伏", "中伏第20天");
  }

  @Test
  public void test5() {
    assertFu(new Solar(2011, 8, 13), "末伏", "末伏第1天");
  }

  @Test
  public void test6() {
    assertFu(new Solar(2011, 8, 22), "末伏", "末伏第10天");
  }

  // Boundary days just outside the 2011 Fu window must return null.
  @Test
  public void test7() {
    assertNoFu(new Solar(2011, 7, 13));
  }

  @Test
  public void test8() {
    assertNoFu(new Solar(2011, 8, 23));
  }

  // 2012 spot checks (10-day 中伏 year).
  @Test
  public void test9() {
    assertFu(new Solar(2012, 7, 18), "初伏", "初伏第1天");
  }

  @Test
  public void test10() {
    assertFu(new Solar(2012, 8, 5), "中伏", "中伏第9天");
  }

  @Test
  public void test11() {
    assertFu(new Solar(2012, 8, 8), "末伏", "末伏第2天");
  }

  // 2020 spot checks (20-day 中伏 year).
  @Test
  public void test12() {
    assertFu(new Solar(2020, 7, 17), "初伏", "初伏第2天");
  }

  @Test
  public void test13() {
    assertFu(new Solar(2020, 7, 26), "中伏", "中伏第1天");
  }

  @Test
  public void test14() {
    assertFu(new Solar(2020, 8, 24), "末伏", "末伏第10天");
  }
}
1,685
1,174
<filename>packages/treat/webpack-plugin/loader/package.json { "main": "dist/treat-webpack-plugin-loader.cjs.js", "module": "dist/treat-webpack-plugin-loader.esm.js", "browser": { "./dist/treat-webpack-plugin-loader.cjs.js": "./dist/treat-webpack-plugin-loader.browser.cjs.js", "./dist/treat-webpack-plugin-loader.esm.js": "./dist/treat-webpack-plugin-loader.browser.esm.js" } }
163
1,248
<reponame>mario-renau-alstom/atlas /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.atlas.repository.graphdb.janus; import com.google.common.base.Preconditions; import org.apache.atlas.AtlasConfiguration; import org.apache.atlas.repository.graphdb.AtlasCardinality; import org.apache.atlas.repository.graphdb.AtlasEdgeDirection; import org.apache.atlas.repository.graphdb.AtlasEdgeLabel; import org.apache.atlas.repository.graphdb.AtlasElement; import org.apache.atlas.repository.graphdb.AtlasGraphIndex; import org.apache.atlas.repository.graphdb.AtlasGraphManagement; import org.apache.atlas.repository.graphdb.AtlasPropertyKey; import org.apache.commons.lang.StringUtils; import org.apache.tinkerpop.gremlin.structure.Direction; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Element; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.janusgraph.core.Cardinality; import org.janusgraph.core.EdgeLabel; import org.janusgraph.core.JanusGraph; import org.janusgraph.core.JanusGraphElement; import org.janusgraph.core.JanusGraphFactory; import org.janusgraph.core.PropertyKey; import org.janusgraph.core.log.TransactionRecovery; import 
org.janusgraph.core.schema.ConsistencyModifier; import org.janusgraph.core.schema.JanusGraphIndex; import org.janusgraph.core.schema.JanusGraphManagement; import org.janusgraph.core.schema.JanusGraphManagement.IndexBuilder; import org.janusgraph.core.schema.Mapping; import org.janusgraph.core.schema.Parameter; import org.janusgraph.core.schema.PropertyKeyMaker; import org.janusgraph.core.schema.SchemaStatus; import org.janusgraph.diskstorage.BackendTransaction; import org.janusgraph.diskstorage.indexing.IndexEntry; import org.janusgraph.graphdb.database.IndexSerializer; import org.janusgraph.graphdb.database.StandardJanusGraph; import org.janusgraph.graphdb.database.management.GraphIndexStatusReport; import org.janusgraph.graphdb.database.management.ManagementSystem; import org.janusgraph.graphdb.internal.Token; import org.janusgraph.graphdb.log.StandardTransactionLogProcessor; import org.janusgraph.graphdb.transaction.StandardJanusGraphTx; import org.janusgraph.graphdb.types.IndexType; import org.janusgraph.graphdb.types.MixedIndexType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.Instant; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import static org.janusgraph.core.schema.SchemaAction.ENABLE_INDEX; import static org.janusgraph.core.schema.SchemaStatus.ENABLED; import static org.janusgraph.core.schema.SchemaStatus.INSTALLED; import static org.janusgraph.core.schema.SchemaStatus.REGISTERED; /** * Janus implementation of AtlasGraphManagement. 
*/ public class AtlasJanusGraphManagement implements AtlasGraphManagement { private static final boolean lockEnabled = AtlasConfiguration.STORAGE_CONSISTENCY_LOCK_ENABLED.getBoolean(); private static final Parameter[] STRING_PARAMETER_ARRAY = new Parameter[]{Mapping.STRING.asParameter()}; private static final Logger LOG = LoggerFactory.getLogger(AtlasJanusGraphManagement.class); private static final char[] RESERVED_CHARS = { '{', '}', '"', '$', Token.SEPARATOR_CHAR }; private AtlasJanusGraph graph; private JanusGraphManagement management; private Set<String> newMultProperties = new HashSet<>(); public AtlasJanusGraphManagement(AtlasJanusGraph graph, JanusGraphManagement managementSystem) { this.management = managementSystem; this.graph = graph; } @Override public void createVertexMixedIndex(String indexName, String backingIndex, List<AtlasPropertyKey> propertyKeys) { IndexBuilder indexBuilder = management.buildIndex(indexName, Vertex.class); for (AtlasPropertyKey key : propertyKeys) { PropertyKey janusKey = AtlasJanusObjectFactory.createPropertyKey(key); indexBuilder.addKey(janusKey); } indexBuilder.buildMixedIndex(backingIndex); } @Override public void createEdgeMixedIndex(String indexName, String backingIndex, List<AtlasPropertyKey> propertyKeys) { IndexBuilder indexBuilder = management.buildIndex(indexName, Edge.class); for (AtlasPropertyKey key : propertyKeys) { PropertyKey janusKey = AtlasJanusObjectFactory.createPropertyKey(key); indexBuilder.addKey(janusKey); } indexBuilder.buildMixedIndex(backingIndex); } @Override public void createEdgeIndex(String label, String indexName, AtlasEdgeDirection edgeDirection, List<AtlasPropertyKey> propertyKeys) { EdgeLabel edgeLabel = management.getEdgeLabel(label); if (edgeLabel == null) { edgeLabel = management.makeEdgeLabel(label).make(); } Direction direction = AtlasJanusObjectFactory.createDirection(edgeDirection); PropertyKey[] keys = AtlasJanusObjectFactory.createPropertyKeys(propertyKeys); if 
(management.getRelationIndex(edgeLabel, indexName) == null) { management.buildEdgeIndex(edgeLabel, indexName, direction, keys); } } @Override public void createFullTextMixedIndex(String indexName, String backingIndex, List<AtlasPropertyKey> propertyKeys) { IndexBuilder indexBuilder = management.buildIndex(indexName, Vertex.class); for (AtlasPropertyKey key : propertyKeys) { PropertyKey janusKey = AtlasJanusObjectFactory.createPropertyKey(key); indexBuilder.addKey(janusKey, org.janusgraph.core.schema.Parameter.of("mapping", Mapping.TEXT)); } indexBuilder.buildMixedIndex(backingIndex); } @Override public boolean containsPropertyKey(String propertyName) { return management.containsPropertyKey(propertyName); } @Override public void rollback() { management.rollback(); } @Override public void commit() { graph.addMultiProperties(newMultProperties); newMultProperties.clear(); management.commit(); } private static void checkName(String name) { //for some reason, name checking was removed from StandardPropertyKeyMaker.make() //in Janus. For consistency, do the check here. 
Preconditions.checkArgument(StringUtils.isNotBlank(name), "Need to specify name"); for (char c : RESERVED_CHARS) { Preconditions.checkArgument(name.indexOf(c) < 0, "Name can not contains reserved character %s: %s", c, name); } } @Override public AtlasPropertyKey makePropertyKey(String propertyName, Class propertyClass, AtlasCardinality cardinality) { if (cardinality.isMany()) { newMultProperties.add(propertyName); } PropertyKeyMaker propertyKeyBuilder = management.makePropertyKey(propertyName).dataType(propertyClass); if (cardinality != null) { Cardinality janusCardinality = AtlasJanusObjectFactory.createCardinality(cardinality); propertyKeyBuilder.cardinality(janusCardinality); } PropertyKey propertyKey = propertyKeyBuilder.make(); return GraphDbObjectFactory.createPropertyKey(propertyKey); } @Override public AtlasEdgeLabel makeEdgeLabel(String label) { EdgeLabel edgeLabel = management.makeEdgeLabel(label).make(); return GraphDbObjectFactory.createEdgeLabel(edgeLabel); } @Override public void deletePropertyKey(String propertyKey) { PropertyKey janusPropertyKey = management.getPropertyKey(propertyKey); if (null == janusPropertyKey) return; for (int i = 0;; i++) { String deletedKeyName = janusPropertyKey + "_deleted_" + i; if (null == management.getPropertyKey(deletedKeyName)) { management.changeName(janusPropertyKey, deletedKeyName); break; } } } @Override public AtlasPropertyKey getPropertyKey(String propertyName) { checkName(propertyName); return GraphDbObjectFactory.createPropertyKey(management.getPropertyKey(propertyName)); } @Override public AtlasEdgeLabel getEdgeLabel(String label) { return GraphDbObjectFactory.createEdgeLabel(management.getEdgeLabel(label)); } @Override public String addMixedIndex(String indexName, AtlasPropertyKey propertyKey, boolean isStringField) { PropertyKey janusKey = AtlasJanusObjectFactory.createPropertyKey(propertyKey); JanusGraphIndex janusGraphIndex = management.getGraphIndex(indexName); if(isStringField) { 
management.addIndexKey(janusGraphIndex, janusKey, Mapping.STRING.asParameter()); LOG.debug("created a string type for {} with janueKey {}.", propertyKey.getName(), janusKey); } else { management.addIndexKey(janusGraphIndex, janusKey); LOG.debug("created a default type for {} with janueKey {}.", propertyKey.getName(), janusKey); } String encodedName = ""; if(isStringField) { encodedName = graph.getIndexFieldName(propertyKey, janusGraphIndex, STRING_PARAMETER_ARRAY); } else { encodedName = graph.getIndexFieldName(propertyKey, janusGraphIndex); } LOG.info("property '{}' is encoded to '{}'.", propertyKey.getName(), encodedName); return encodedName; } @Override public String getIndexFieldName(String indexName, AtlasPropertyKey propertyKey, boolean isStringField) { JanusGraphIndex janusGraphIndex = management.getGraphIndex(indexName); if(isStringField) { return graph.getIndexFieldName(propertyKey, janusGraphIndex, STRING_PARAMETER_ARRAY); } else { return graph.getIndexFieldName(propertyKey, janusGraphIndex); } } public AtlasGraphIndex getGraphIndex(String indexName) { JanusGraphIndex index = management.getGraphIndex(indexName); return GraphDbObjectFactory.createGraphIndex(index); } @Override public boolean edgeIndexExist(String label, String indexName) { EdgeLabel edgeLabel = management.getEdgeLabel(label); return edgeLabel != null && management.getRelationIndex(edgeLabel, indexName) != null; } @Override public void createVertexCompositeIndex(String propertyName, boolean isUnique, List<AtlasPropertyKey> propertyKeys) { createCompositeIndex(propertyName, isUnique, propertyKeys, Vertex.class); } @Override public void createEdgeCompositeIndex(String propertyName, boolean isUnique, List<AtlasPropertyKey> propertyKeys) { createCompositeIndex(propertyName, isUnique, propertyKeys, Edge.class); } private void createCompositeIndex(String propertyName, boolean isUnique, List<AtlasPropertyKey> propertyKeys, Class<? 
extends Element> elementType) { IndexBuilder indexBuilder = management.buildIndex(propertyName, elementType); for (AtlasPropertyKey key : propertyKeys) { PropertyKey janusKey = AtlasJanusObjectFactory.createPropertyKey(key); indexBuilder.addKey(janusKey); } if (isUnique) { indexBuilder.unique(); } JanusGraphIndex index = indexBuilder.buildCompositeIndex(); if (lockEnabled && isUnique) { management.setConsistency(index, ConsistencyModifier.LOCK); } } @Override public void updateUniqueIndexesForConsistencyLock() { try { setConsistency(this.management, Vertex.class); setConsistency(this.management, Edge.class); } finally { commit(); } } @Override public void updateSchemaStatus() { updateSchemaStatus(this.management, this.graph.getGraph(), Vertex.class); updateSchemaStatus(this.management, this.graph.getGraph(), Edge.class); } public static void updateSchemaStatus(JanusGraphManagement mgmt, JanusGraph graph, Class<? extends Element> elementType) { LOG.info("updating SchemaStatus for {}: Starting...", elementType.getSimpleName()); int count = 0; Iterable<JanusGraphIndex> iterable = mgmt.getGraphIndexes(elementType); for (JanusGraphIndex index : iterable) { if (index.isCompositeIndex()) { PropertyKey[] propertyKeys = index.getFieldKeys(); SchemaStatus status = index.getIndexStatus(propertyKeys[0]); String indexName = index.name(); try { if (status == REGISTERED) { JanusGraphManagement management = graph.openManagement(); JanusGraphIndex indexToUpdate = management.getGraphIndex(indexName); management.updateIndex(indexToUpdate, ENABLE_INDEX).get(); management.commit(); GraphIndexStatusReport report = ManagementSystem.awaitGraphIndexStatus(graph, indexName).status(ENABLED).call(); if (!report.getConvergedKeys().isEmpty() && report.getConvergedKeys().containsKey(indexName)) { LOG.info("SchemaStatus updated for index: {}, from {} to {}.", index.name(), REGISTERED, ENABLED); count++; } else if (!report.getNotConvergedKeys().isEmpty() && 
report.getNotConvergedKeys().containsKey(indexName)) { LOG.error("SchemaStatus failed to update index: {}, from {} to {}.", index.name(), REGISTERED, ENABLED); } } else if (status == INSTALLED) { LOG.warn("SchemaStatus {} found for index: {}", INSTALLED, indexName); } } catch (InterruptedException e) { LOG.error("IllegalStateException for indexName : {}, Exception: ", indexName, e); } catch (ExecutionException e) { LOG.error("ExecutionException for indexName : {}, Exception: ", indexName, e); } } } LOG.info("updating SchemaStatus for {}: {}: Done!", elementType.getSimpleName(), count); } private static void setConsistency(JanusGraphManagement mgmt, Class<? extends Element> elementType) { LOG.info("setConsistency: {}: Starting...", elementType.getSimpleName()); int count = 0; try { Iterable<JanusGraphIndex> iterable = mgmt.getGraphIndexes(elementType); for (JanusGraphIndex index : iterable) { if (!index.isCompositeIndex() || !index.isUnique() || mgmt.getConsistency(index) == ConsistencyModifier.LOCK) { continue; } for (PropertyKey propertyKey : index.getFieldKeys()) { LOG.info("setConsistency: {}: {}", count, propertyKey.name()); } mgmt.setConsistency(index, ConsistencyModifier.LOCK); count++; } } finally { LOG.info("setConsistency: {}: {}: Done!", elementType.getSimpleName(), count); } } @Override public void reindex(String indexName, List<AtlasElement> elements) throws Exception { try { JanusGraphIndex index = management.getGraphIndex(indexName); if (index == null || !(management instanceof ManagementSystem) || !(graph.getGraph() instanceof StandardJanusGraph)) { LOG.error("Could not retrieve index for name: {} ", indexName); return; } ManagementSystem managementSystem = (ManagementSystem) management; IndexType indexType = managementSystem.getSchemaVertex(index).asIndexType(); if (!(indexType instanceof MixedIndexType)) { LOG.warn("Index: {}: Not of MixedIndexType ", indexName); return; } IndexSerializer indexSerializer = ((StandardJanusGraph) 
graph.getGraph()).getIndexSerializer(); reindexElement(managementSystem, indexSerializer, (MixedIndexType) indexType, elements); } catch (Exception exception) { throw exception; } finally { management.commit(); } } @Override public Object startIndexRecovery(long recoveryStartTime) { Instant recoveryStartInstant = Instant.ofEpochMilli(recoveryStartTime); JanusGraph janusGraph = this.graph.getGraph(); return JanusGraphFactory.startTransactionRecovery(janusGraph, recoveryStartInstant); } @Override public void stopIndexRecovery(Object txRecoveryObject) { if (txRecoveryObject == null) { return; } try { if (txRecoveryObject instanceof TransactionRecovery) { TransactionRecovery txRecovery = (TransactionRecovery) txRecoveryObject; StandardJanusGraph janusGraph = (StandardJanusGraph) this.graph.getGraph(); LOG.info("stopIndexRecovery: Index Client is unhealthy. Index recovery: Paused!"); janusGraph.getBackend().getSystemTxLog().close(); txRecovery.shutdown(); } else { LOG.error("stopIndexRecovery({}): Invalid transaction recovery object!", txRecoveryObject); } } catch (Exception e) { LOG.warn("stopIndexRecovery: Error while shutting down transaction recovery", e); } } @Override public void printIndexRecoveryStats(Object txRecoveryObject) { if (txRecoveryObject == null) { return; } try { if (txRecoveryObject instanceof TransactionRecovery) { StandardTransactionLogProcessor txRecovery = (StandardTransactionLogProcessor) txRecoveryObject; long[] statistics = txRecovery.getStatistics(); if (statistics.length >= 2) { LOG.info("Index Recovery: Stats: Success:{}: Failed: {}", statistics[0], statistics[1]); } else { LOG.info("Index Recovery: Stats: {}", statistics); } } else { LOG.error("Transaction stats: Invalid transaction recovery object!: Unexpected type: {}: Details: {}", txRecoveryObject.getClass().toString(), txRecoveryObject); } } catch (Exception e) { LOG.error("Error: Retrieving log transaction stats!", e); } } private void reindexElement(ManagementSystem 
managementSystem, IndexSerializer indexSerializer, MixedIndexType indexType, List<AtlasElement> elements) throws Exception { Map<String, Map<String, List<IndexEntry>>> documentsPerStore = new HashMap<>(); StandardJanusGraphTx tx = managementSystem.getWrappedTx(); BackendTransaction txHandle = tx.getTxHandle(); try { JanusGraphElement janusGraphElement = null; for (AtlasElement element : elements) { try { if (element == null || element.getWrappedElement() == null) { continue; } janusGraphElement = element.getWrappedElement(); indexSerializer.reindexElement(janusGraphElement, indexType, documentsPerStore); } catch (Exception e) { LOG.warn("{}: Exception: {}:{}", indexType.getName(), e.getClass().getSimpleName(), e.getMessage()); } } } finally { if (txHandle != null) { txHandle.getIndexTransaction(indexType.getBackingIndexName()).restore(documentsPerStore); } } } }
8,036
1,042
<reponame>scottcgi/Mojoc
/*
 * Copyright (c) scott.cgi All Rights Reserved.
 *
 * This source code belongs to project Mojoc, which is a pure C Game Engine hosted on GitHub.
 * The Mojoc Game Engine is licensed under the MIT License, and will continue to be iterated with coding passion.
 *
 * License  : https://github.com/scottcgi/Mojoc/blob/master/LICENSE
 * GitHub   : https://github.com/scottcgi/Mojoc
 * CodeStyle: https://github.com/scottcgi/Mojoc/blob/master/Docs/CodeStyle.md
 *
 * Since    : 2016-11-13
 * Update   : 2019-1-9
 * Author   : scott.cgi
 */

#include <stdlib.h>
#include "Engine/Toolkit/HeaderUtils/Define.h"
#include "Engine/Toolkit/Utils/Coroutine.h"
#include "Engine/Toolkit/Platform/Log.h"

/* Coroutines currently scheduled; walked backwards every Update tick. */
static ArrayList(Coroutine*) coroutineRunningList[1] = AArrayList_Init(Coroutine*, 25);
/* Finished Coroutine objects recycled here to avoid repeated malloc. */
static ArrayList(Coroutine*) coroutineCacheList [1] = AArrayList_Init(Coroutine*, 25);

/*
 * Schedules Run as a new coroutine and returns its handle.
 * A cached object is reused when available; otherwise a fresh Coroutine is
 * heap-allocated (never freed — it cycles between running and cache lists).
 * The returned coroutine starts at step 0 with no wait condition.
 */
static Coroutine* StartCoroutine(CoroutineRun Run)
{
    Coroutine* coroutine = AArrayList_Pop(coroutineCacheList, Coroutine*);

    if (coroutine == NULL)
    {
        coroutine = malloc(sizeof(Coroutine));
        /* waits holds coroutines blocked on this one — presumably filled by
           the wait API elsewhere in ACoroutine; confirm in Coroutine.h. */
        AArrayList->Init(sizeof(Coroutine*), coroutine->waits);
        coroutine->waits->increase = 4;
    }
    else
    {
        /* Recycled object: drop any stale waiters from its previous life. */
        AArrayList->Clear(coroutine->waits);
    }

    coroutine->Run          = Run;
    coroutine->step         = 0;
    coroutine->waitValue    = 0.0f;
    coroutine->curWaitValue = 0.0f;
    coroutine->waitType     = CoroutineWaitType_Null;
    coroutine->state        = CoroutineState_Ready;
    AUserData_Init(coroutine->userData);

    AArrayList_Add(coroutineRunningList, coroutine);

    return coroutine;
}

/*
 * Advances every running coroutine by one tick.
 * Iterates backwards so RemoveByLast (swap-with-last removal) never skips an
 * unvisited element. deltaSeconds is the time elapsed since the last tick,
 * used to advance CoroutineWaitType_Seconds waits.
 */
static void Update(float deltaSeconds)
{
    for (int i = coroutineRunningList->size - 1; i > -1; --i)
    {
        Coroutine* coroutine = AArrayList_Get(coroutineRunningList, i, Coroutine*);

        if (coroutine->waitType == CoroutineWaitType_Coroutines)
        {
            /* Blocked on other coroutine(s); released below when they finish. */
            continue;
        }
        else if (coroutine->curWaitValue >= coroutine->waitValue)
        {
            /* Wait satisfied (always true for the Null wait type): run one step. */
            coroutine->Run(coroutine);

            if (coroutine->state == CoroutineState_Finish)
            {
                AArrayList->RemoveByLast(coroutineRunningList, i);
                // add to cache
                AArrayList_Add(coroutineCacheList, coroutine);

                // set waiting coroutines execute forward
                for (int j = 0; j < coroutine->waits->size; ++j)
                {
                    Coroutine* wait = AArrayList_Get(coroutine->waits, j, Coroutine*);

                    ALog_A
                    (
                        wait->state != CoroutineState_Finish,
                        "Coroutine [%p] cannot finish before wait coroutine [%p] finish",
                        wait,
                        coroutine
                    );

                    /* Unblock the waiter; it will run on a later Update tick. */
                    wait->waitType = CoroutineWaitType_Null;
                }

                continue;
            }
        }
        else
        {
            /* Wait not yet satisfied: accumulate progress toward waitValue.
               Note an N-frame/N-second wait resumes on the tick AFTER
               curWaitValue reaches waitValue. */
            switch (coroutine->waitType)
            {
                case CoroutineWaitType_Frames:
                    coroutine->curWaitValue += 1.0f;
                    break;

                case CoroutineWaitType_Seconds:
                    coroutine->curWaitValue += deltaSeconds;
                    break;

                case CoroutineWaitType_Null:
                    break;

                case CoroutineWaitType_Coroutines:
                    break;
            }
        }
    }
}

struct ACoroutine ACoroutine[1] =
{{
    StartCoroutine,
    Update,
}};
1,747
9,778
/*-------------------------------------------------------------------------
 *
 * interrupt.h
 *	  Interrupt handling routines.
 *
 * Responses to interrupts are fairly varied and many types of backends
 * have their own implementations, but we provide a few generic things
 * here to facilitate code reuse.
 *
 * Portions Copyright (c) 1996-2021, PostgreSQL Global Development Group
 * Portions Copyright (c) 1994, Regents of the University of California
 *
 * IDENTIFICATION
 *	  src/include/postmaster/interrupt.h
 *
 *-------------------------------------------------------------------------
 */
#ifndef INTERRUPT_H
#define INTERRUPT_H

#include <signal.h>

/*
 * Async-signal-safe flags set by the handlers below; presumably cleared by
 * the consuming main loop once serviced (confirm in interrupt.c).
 */
extern PGDLLIMPORT volatile sig_atomic_t ConfigReloadPending;
extern PGDLLIMPORT volatile sig_atomic_t ShutdownRequestPending;

/* Services whatever pending flags are set; call from a process's main loop. */
extern void HandleMainLoopInterrupts(void);

/* Signal handler: requests a configuration reload by setting ConfigReloadPending. */
extern void SignalHandlerForConfigReload(SIGNAL_ARGS);
/* Signal handler: exits immediately, for crash-style shutdown. */
extern void SignalHandlerForCrashExit(SIGNAL_ARGS);
/* Signal handler: requests a clean shutdown by setting ShutdownRequestPending. */
extern void SignalHandlerForShutdownRequest(SIGNAL_ARGS);

#endif							/* INTERRUPT_H */
266
1,279
<filename>DbGit/pluginsdk/yara/yara/strutils.h /* Copyright (c) 2007-2014. The YARA Authors. All Rights Reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef YR_STRUTILS_H #define YR_STRUTILS_H #include <assert.h> #include <stdlib.h> #include "integers.h" // Cygwin already has these functions. 
#if defined(_WIN32) && !defined(__CYGWIN__) #if defined(_MSC_VER) && _MSC_VER < 1900 #define snprintf _snprintf #endif #define strcasecmp _stricmp #define strncasecmp _strnicmp #endif uint64_t xtoi( const char* hexstr); #if !HAVE_STRLCPY && !defined(strlcpy) size_t strlcpy( char* dst, const char* src, size_t size); #endif #if !HAVE_STRLCAT && !defined(strlcat) size_t strlcat( char* dst, const char* src, size_t size); #endif #if !HAVE_MEMMEM && !defined(memmem) void* memmem( const void* haystack, size_t haystack_size, const void* needle, size_t needle_size); #endif int strnlen_w( const char* w_str); int strcmp_w( const char* w_str, const char* str); size_t strlcpy_w( char* dst, const char* w_src, size_t n); #endif
948
2,039
package org.nd4j.jita.flow.impl; import lombok.Getter; import org.nd4j.jita.allocator.Allocator; import org.nd4j.jita.allocator.context.ContextPack; import org.nd4j.jita.allocator.enums.AllocationStatus; import org.nd4j.jita.allocator.enums.CudaConstants; import org.nd4j.jita.allocator.impl.AllocationPoint; import org.nd4j.jita.allocator.pointers.cuda.cudaEvent_t; import org.nd4j.jita.allocator.pointers.cuda.cudaStream_t; import org.nd4j.jita.allocator.time.TimeProvider; import org.nd4j.jita.allocator.time.providers.OperativeProvider; import org.nd4j.jita.allocator.utils.AllocationUtils; import org.nd4j.jita.concurrency.EventsProvider; import org.nd4j.jita.conf.Configuration; import org.nd4j.jita.conf.CudaEnvironment; import org.nd4j.jita.flow.FlowController; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.jcublas.context.CudaContext; import org.nd4j.nativeblas.NativeOps; import org.nd4j.nativeblas.NativeOpsHolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; /** * Experimental code, do not use please. 
* * @author <EMAIL> */ @Deprecated public class AsynchronousFlowController implements FlowController { private volatile Allocator allocator; private static final Configuration configuration = CudaEnvironment.getInstance().getConfiguration(); private static Logger log = LoggerFactory.getLogger(AsynchronousFlowController.class); protected NativeOps nativeOps = NativeOpsHolder.getInstance().getDeviceNativeOps(); @Getter protected EventsProvider eventsProvider = new EventsProvider(); private transient TimeProvider timeProvider = new OperativeProvider(); protected AtomicLong asyncHit = new AtomicLong(0); protected AtomicLong asyncMiss = new AtomicLong(0); protected Map<Integer, AtomicLong> lanesCounter = new ConcurrentHashMap<>(); private AtomicLong totalHits = new AtomicLong(0); protected static final int MAX_EXECUTION_QUEUE = configuration.getCommandQueueLength(); protected static final AtomicLong eventCounts = new AtomicLong(0); protected ArrayList<ArrayList<Queue<cudaEvent_t>>> eventsBarrier = new ArrayList<>(); protected ArrayList<ArrayList<AtomicLong>> laneClocks = new ArrayList<>(); protected ArrayList<AtomicLong> deviceClocks = new ArrayList<>(); public AsynchronousFlowController() { int numLanes = configuration.getCommandLanesNumber(); int numDevices = Nd4j.getAffinityManager().getNumberOfDevices(); for (int d = 0; d < numDevices; d++) { eventsBarrier.add(d, new ArrayList<Queue<cudaEvent_t>>()); laneClocks.add(d, new ArrayList<AtomicLong>()); deviceClocks.add(d, new AtomicLong(0)); for (int l = 0; l < numLanes; l++) { eventsBarrier.get(d).add(l, new ConcurrentLinkedQueue<cudaEvent_t>()); laneClocks.get(d).add(l, new AtomicLong(0)); } } } @Override public void synchronizeToDevice(AllocationPoint point) { } @Override public void init(Allocator allocator) { this.allocator = allocator; } @Override public void synchronizeToHost(AllocationPoint point) { if (!point.isActualOnHostSide()) { if (!point.isConstant()) waitTillFinished(point); // log.info("Synchronization 
started... " + point.getShape()); // if this piece of memory is device-dependant, we'll also issue copyback once if (point.getAllocationStatus() == AllocationStatus.DEVICE && !point.isActualOnHostSide()) { CudaContext context = (CudaContext) allocator.getDeviceContext().getContext(); if (nativeOps.memcpyAsync(point.getHostPointer(), point.getDevicePointer(), AllocationUtils.getRequiredMemory(point.getShape()), CudaConstants.cudaMemcpyDeviceToHost, context.getSpecialStream()) == 0) throw new IllegalStateException("MemcpyAsync D2H failed: [" + point.getDevicePointer().address() + "] -> [" + point.getHostPointer().address() + "]"); commitTransfer(context.getSpecialStream()); } // else log.info("Not [DEVICE] memory, skipping..."); // updating host read timer point.tickHostRead(); //log.info("After sync... isActualOnHostSide: {}", point.isActualOnHostSide()); } } @Override public void waitTillFinished(AllocationPoint point) { cudaEvent_t event = point.getWriteLane(); if (event != null) { event.synchronize(); event.destroy(); } } public void waitTillReleased(AllocationPoint point) { waitTillFinished(point); cudaEvent_t event; while ((event = point.getReadLane().poll()) != null) { event.synchronize(); event.destroy(); } } @Override public void registerAction(CudaContext context, INDArray result, INDArray... 
operands) { // TODO: this should be lane-dependant context if (totalHits.incrementAndGet() % 25000 == 0) { log.debug("AsyncHit ratio: [{}]", getAsyncHitRatio()); /* for (int lane = 0; lane < allocator.getContextPool().acquireContextPackForDevice(0).getAvailableLanes(); lane++) { log.debug("Lane [{}]: {} ", lane, lanesCounter.get(lane).get()); } */ } cudaEvent_t event = new cudaEvent_t(nativeOps.createEvent()); event.setLaneId(context.getLaneId()); nativeOps.registerEvent(event, context.getOldStream()); if (result != null) { setWriteLane(result, event); allocator.tickDeviceWrite(result); } for (INDArray operand : operands) { if (operand == null) continue; setReadLane(operand, event); } Integer deviceId = allocator.getDeviceId(); fillTail(deviceId, event.getLaneId(), event); } @Override public void registerActionAllWrite(CudaContext context, INDArray... operands) { } protected void setWriteLane(INDArray array, cudaEvent_t event) { AllocationPoint point = allocator.getAllocationPoint(array); point.setWriteLane(event); } protected void setReadLane(INDArray array, cudaEvent_t event) { AllocationPoint point = allocator.getAllocationPoint(array); point.addReadLane(event); } protected Queue<cudaEvent_t> getReadLanes(INDArray array) { AllocationPoint point = allocator.getAllocationPoint(array); return point.getReadLane(); } protected cudaEvent_t getWriteLane(INDArray array) { AllocationPoint point = allocator.getAllocationPoint(array); return point.getWriteLane(); } protected int hasActiveWrite(INDArray array) { if (array == null) return -1; cudaEvent_t event = getWriteLane(array); if (event == null || event.isDestroyed()) return -1; return event.getLaneId(); } protected int hasActiveWrite(AllocationPoint point) { cudaEvent_t event = point.getWriteLane(); if (event == null || event.isDestroyed()) return -1; return event.getLaneId(); } protected boolean hasActiveReads(AllocationPoint point) { Queue<cudaEvent_t> events = point.getReadLane(); if (events.size() == 0) return 
false; AtomicBoolean result = new AtomicBoolean(false); List<cudaEvent_t> asList = new ArrayList<>(events); for (cudaEvent_t event : asList) { if (event == null) continue; // we mark this AllocationPoint is pending read, if at least one event isn't destroyed yet result.compareAndSet(false, !event.isDestroyed()); } return result.get(); } protected boolean hasActiveReads(INDArray array) { if (array == null) return false; AllocationPoint point = allocator.getAllocationPoint(array); return hasActiveReads(point); } protected boolean isMatchingLanes(int[] lanes) { if (lanes[0] == lanes[1] || lanes[1] == -1 || lanes[0] == -1) return true; return false; } protected boolean isMatchingLanes(int zLane, int[] lanes) { if ((zLane == lanes[0] || zLane == lanes[1]) && isMatchingLanes(lanes)) return true; return false; } protected void synchronizeReadLanes(AllocationPoint point) { cudaEvent_t event; int cnt = 0; while ((event = point.getReadLane().poll()) != null) { event.synchronize(); event.destroy(); cnt++; } // log.info("Events synchronized: [{}]", cnt); } protected void synchronizeReadLanes(INDArray array) { if (array == null) return; AllocationPoint point = allocator.getAllocationPoint(array); synchronizeReadLanes(point); } @Override public void registerAction(CudaContext context, AllocationPoint result, AllocationPoint... operands) { cudaEvent_t event = new cudaEvent_t(nativeOps.createEvent()); event.setLaneId(context.getLaneId()); nativeOps.registerEvent(event, context.getOldStream()); result.setWriteLane(event); Integer deviceId = allocator.getDeviceId(); fillTail(deviceId, event.getLaneId(), event); } @Override public CudaContext prepareAction(AllocationPoint result, AllocationPoint... 
operands) { if (hasActiveReads(result)) synchronizeReadLanes(result); ContextPack pack = allocator.getContextPool().acquireContextPackForDevice(allocator.getDeviceId()); return pack.getContextForLane(pack.nextRandomLane()); } protected int pickFirstLane(int[] lanes) { if (lanes[0] >= 0) return lanes[0]; else if (lanes[1] >= 0) return lanes[1]; return 0; } @Override public CudaContext prepareAction(INDArray result, INDArray... operands) { /** * This method should decide, which CUDA stream should be used for execution, based on data affinity * Decision is made based on data affinity, at INDArray level solely. */ ContextPack pack = allocator.getContextPool().acquireContextPackForDevice(allocator.getDeviceId()); // for result holding lane do not really matters, only depending lanes to matter, because they are used to read // default lane is lane_0 int newLane = 0; int zLane = hasActiveWrite(result); boolean zReads = hasActiveReads(result); if (result != null && (zReads || zLane >= 0)) { // we send this op to the same lane as active read/write event // but we still have to check, if op.X and op.Y has pending writes on other lanes // log.info("Busy Z dep: [{}], hasReads: [{}]", zLane, zReads); AtomicInteger cnt = new AtomicInteger(0); AtomicInteger holdersCount = new AtomicInteger(0); int lastLane = -1; //int pendingLanes[] = new int[]{-1, -1}; // FIXME: this is wrong. int pendingLanes[] = new int[operands.length + 1]; Arrays.fill(pendingLanes, -1); for (INDArray operand : operands) { if (operand == null) continue; int lane = hasActiveWrite(operand); if (lane >= 0) { // at least one operand has pendingWrite. And we don't care about pending reads. 
pendingLanes[cnt.get()] = lane; holdersCount.incrementAndGet(); lastLane = lane; } cnt.incrementAndGet(); } if (zReads) { // log.info("Synchronizing zReads"); synchronizeReadLanes(result); } if (holdersCount.get() > 0) { asyncMiss.incrementAndGet(); if (isMatchingLanes(zLane, pendingLanes)) { // log.info("All matching lanes additional deps in [{}] -> [{}, {}]", zLane, pendingLanes[0], pendingLanes[1]); if (zLane >= 0) newLane = zLane; else newLane = pickFirstLane(pendingLanes); } else { // log.info("Mismatching lanes additional deps in [{}] -> [{}, {}]", zLane, pendingLanes[0], pendingLanes[1]); // now we must sync on both pendingLanes and pass data to zLane if (zLane >= 0) newLane = zLane; else newLane = pickFirstLane(pendingLanes); for (INDArray operand : operands) { if (operand == null) continue; waitTillFinished(allocator.getAllocationPoint(operand)); } } } else { // log.info("Only Z is holder: [{}]", zLane); asyncHit.incrementAndGet(); if (zLane < 0) zLane = pack.nextRandomLane(); newLane = zLane; } } else { // we go and check op.X and op.Y AtomicInteger cnt = new AtomicInteger(0); AtomicInteger holdersCount = new AtomicInteger(0); int lastLane = -1; // FIXME: this is wrong. //int pendingLanes[] = new int[]{-1, -1, -1, -1}; int pendingLanes[] = new int[operands.length + 1]; Arrays.fill(pendingLanes, -1); for (INDArray operand : operands) { if (operand == null) continue; int lane = hasActiveWrite(operand); if (lane >= 0) { // at least one operand has pendingWrite. And we don't care about pending reads. 
pendingLanes[cnt.get()] = lane; holdersCount.incrementAndGet(); lastLane = lane; } cnt.incrementAndGet(); } if (holdersCount.get() > 0) { // we have some holders here asyncMiss.incrementAndGet(); if (isMatchingLanes(pendingLanes)) { // if op.X and/or op.Y has pending write in same lane - just throw op to that lane, and enjoy newLane = lastLane; // log.info("Paired dependencies: [{}]", newLane); } else { // we have different lanes for op.X and op.Y with pending write. We need to synchronize somewhere to become free. // basically - synchronize on one lane, and throw task to another one //log.info("Unpaired dependencies: [{}, {}]", pendingLanes[0], pendingLanes[1]); if (pendingLanes[0] >= 0) { waitTillFinished(allocator.getAllocationPoint(operands[0])); newLane = pendingLanes[1]; } else if (pendingLanes[1] >= 0) { waitTillFinished(allocator.getAllocationPoint(operands[1])); newLane = pendingLanes[0]; } } } else { // we don't have any holders here. Totally free execution here asyncHit.incrementAndGet(); newLane = pack.nextRandomLane(); // log.info("Free pass here: [{}]", newLane); } } CudaContext context = pack.getContextForLane(newLane); if (result != null) allocator.getAllocationPoint(result).setCurrentContext(context); for (INDArray operand : operands) { if (operand == null) continue; allocator.getAllocationPoint(operand).setCurrentContext(context); } if (!lanesCounter.containsKey(newLane)) { lanesCounter.put(newLane, new AtomicLong(0)); } lanesCounter.get(newLane).incrementAndGet(); if (context == null) throw new IllegalStateException("Context shouldn't be null: " + newLane); return context; } @Override public CudaContext prepareActionAllWrite(INDArray... 
operands) { return null; } private float getAsyncHitRatio() { long totalHits = asyncHit.get() + asyncMiss.get(); float cacheRatio = asyncHit.get() * 100 / (float) totalHits; return cacheRatio; } protected void fillTail(int deviceId, int lane, cudaEvent_t event) { eventsBarrier.get(deviceId).get(lane).add(event); long tick = deviceClocks.get(deviceId).incrementAndGet(); laneClocks.get(deviceId).get(lane).set(tick); } /** * This method ensures the events in the beginning of FIFO queues are finished */ protected void sweepTail() { Integer deviceId = allocator.getDeviceId(); int cnt = 0; // we get number of issued commands for specific device long lastCommandId = deviceClocks.get(deviceId).get(); for (int l = 0; l < configuration.getCommandLanesNumber(); l++) { Queue<cudaEvent_t> queue = eventsBarrier.get(deviceId).get(l); if (queue.size() >= MAX_EXECUTION_QUEUE || laneClocks.get(deviceId).get(l).get() < lastCommandId - MAX_EXECUTION_QUEUE) { cudaEvent_t event = queue.poll(); if (event != null && !event.isDestroyed()) { event.synchronize(); event.destroy(); cnt++; } } } deviceClocks.get(deviceId).incrementAndGet(); // log.info("Events sweeped: [{}]", cnt); } protected void cutTail() { Integer deviceId = allocator.getDeviceId(); for (int l = 0; l < configuration.getCommandLanesNumber(); l++) { Queue<cudaEvent_t> queue = eventsBarrier.get(deviceId).get(l); cudaEvent_t event; while ((event = queue.poll()) != null) { event.synchronize(); event.destroy(); } } } @Override public void commitTransfer(cudaStream_t streamUsed) { sweepTail(); streamUsed.synchronize(); } }
8,616
1,025
//================================================================================== // Copyright (c) 2016 , Advanced Micro Devices, Inc. All rights reserved. // /// \author AMD Developer Tools Team /// \file afProjectManager.cpp /// //================================================================================== // Qt: #include <QtWidgets> // infra: #include <AMDTBaseTools/Include/gtAssert.h> #include <AMDTApplicationComponents/Include/acMessageBox.h> #include <AMDTAPIClasses/Include/Events/apEventsHandler.h> #include <AMDTAPIClasses/Include/Events/apExecutionModeChangedEvent.h> // Local: #include <AMDTApplicationFramework/Include/afApplicationCommands.h> #include <AMDTApplicationFramework/Include/afAppStringConstants.h> #include <AMDTApplicationFramework/Include/afExecutionModeManager.h> #include <AMDTApplicationFramework/Include/afGlobalVariableChangedEvent.h> #include <AMDTApplicationFramework/Include/afGlobalVariablesManager.h> #include <AMDTApplicationFramework/Include/afProjectManager.h> #include <AMDTApplicationFramework/Include/afRecentProjectsActionsExecutor.h> // Static members initializations: afProjectManager* afProjectManager::m_spMySingleInstance = nullptr; // --------------------------------------------------------------------------- // Name: afProjectManager::afProjectManager // Description: Constructor // Author: <NAME> // Date: 3/4/2012 // --------------------------------------------------------------------------- afProjectManager::afProjectManager() { } // --------------------------------------------------------------------------- // Name: afProjectManager::~afProjectManager // Description: Destructor // Author: <NAME> // Date: 3/4/2012 // --------------------------------------------------------------------------- afProjectManager::~afProjectManager() { // There is an issue with deleting the settings pages, since they are also Qt windows. 
// Instead, just clear the vector: m_projectSettingsExtensions.clear(); } // --------------------------------------------------------------------------- // Name: afProjectManager::instance // Description: Get singleton instance // Return Val: afProjectManager& // Author: <NAME> // Date: 3/4/2012 // --------------------------------------------------------------------------- afProjectManager& afProjectManager::instance() { // If my single instance was not created yet - create it: if (m_spMySingleInstance == nullptr) { m_spMySingleInstance = new afProjectManager; GT_ASSERT(m_spMySingleInstance); } return *m_spMySingleInstance; } // --------------------------------------------------------------------------- // Name: afProjectManager::registerProjectSettingsExtension // Description: Get the current project settings. By default, updates them from the // current implementation // Author: <NAME> // Date: 21/5/2012 // --------------------------------------------------------------------------- const apProjectSettings& afProjectManager::currentProjectSettings() const { return m_currentProject; } // --------------------------------------------------------------------------- // Name: afProjectManager::registerProjectSettingsExtension // Description: Register an extension for project settings // Arguments: afProjectSettingsExtension* pProjectSettingsExtension // Return Val: void // Author: <NAME> // Date: 4/4/2012 // --------------------------------------------------------------------------- void afProjectManager::registerProjectSettingsExtension(afProjectSettingsExtension* pProjectSettingsExtension) { // Sanity check: GT_IF_WITH_ASSERT(pProjectSettingsExtension != nullptr) { // Initialize the extension: pProjectSettingsExtension->Initialize(); m_projectSettingsExtensions.push_back(pProjectSettingsExtension); } } // --------------------------------------------------------------------------- // Name: afProjectManager::registerToListenExeChanged // Description: Register an 
extension for project settings // that is informed whenever executble is changed // Return Val: void // Author: <NAME> // Date: 5/10/2013 // --------------------------------------------------------------------------- void afProjectManager::registerToListenExeChanged(afProjectSettingsExtension* pProjectSettingsExtension) { // Sanity check: GT_IF_WITH_ASSERT(pProjectSettingsExtension != nullptr) { QObject::connect(this, SIGNAL(ExecutableChanged(const QString&, bool, bool)), pProjectSettingsExtension, SLOT(OnExecutableChanged(const QString&, bool, bool))); } } // --------------------------------------------------------------------------- // Name: afProjectManager::getExtensionSettingsWidget // Description: Return the widget handling the project settings for extension with the requested index // Arguments: int extensionIndex // Return Val: QWidget* // Author: <NAME> // Date: 4/4/2012 // --------------------------------------------------------------------------- QWidget* afProjectManager::getExtensionSettingsWidget(int extensionIndex, gtString& extensionDisplayName) { QWidget* pRetVal = nullptr; GT_IF_WITH_ASSERT((extensionIndex >= 0) && (extensionIndex < (int)m_projectSettingsExtensions.size())) { // Get the extension: afProjectSettingsExtension* pExtension = m_projectSettingsExtensions[extensionIndex]; GT_IF_WITH_ASSERT(pExtension != nullptr) { pRetVal = pExtension; extensionDisplayName = pExtension->ExtensionTreePathAsString(); } } return pRetVal; } // --------------------------------------------------------------------------- // Name: afProjectManager::saveCurrentProjectData // Description: Save the project data for the requested extension // Arguments: QWidget* pWidget // int extensionIndex // Return Val: bool - Success / failure. 
// Author: <NAME> // Date: 4/4/2012 // --------------------------------------------------------------------------- bool afProjectManager::saveCurrentProjectData(int extensionIndex) { bool retVal = false; GT_IF_WITH_ASSERT((extensionIndex >= 0) && (extensionIndex < (int)m_projectSettingsExtensions.size())) { // Get the extension object: afProjectSettingsExtension* pExtension = m_projectSettingsExtensions[extensionIndex]; GT_IF_WITH_ASSERT(pExtension != nullptr) { retVal = pExtension->SaveCurrentSettings(); } } return retVal; } // --------------------------------------------------------------------------- // Name: afProjectManager::currentProjectDataAsXMLString // Description: Return the requested extension settings string as XML // Arguments: int extensionIndex // const gtString& settingsAsXML // Return Val: bool - Success / failure. // Author: <NAME> // Date: 5/4/2012 // --------------------------------------------------------------------------- bool afProjectManager::currentProjectDataAsXMLString(int extensionIndex, gtString& settingsAsXML) { bool retVal = false; GT_IF_WITH_ASSERT((extensionIndex >= 0) && (extensionIndex < (int)m_projectSettingsExtensions.size())) { // Get the extension object: afProjectSettingsExtension* pExtension = m_projectSettingsExtensions[extensionIndex]; GT_IF_WITH_ASSERT(pExtension != nullptr) { retVal = pExtension->GetXMLSettingsString(settingsAsXML); } } return retVal; } // --------------------------------------------------------------------------- // Name: afProjectManager::setCurrentProjectDataFromXMLString // Description: Save the current settings for the current extension // Arguments: const gtString& extensionName // const gtString& settingsAsXML // Return Val: bool - Success / failure. 
// Author: <NAME> // Date: 8/4/2012 // --------------------------------------------------------------------------- bool afProjectManager::setCurrentProjectDataFromXMLString(const gtString& extensionName, const gtString& settingsAsXML, bool& wasProjectFound) { bool retVal = false; wasProjectFound = false; // Look for the extension with this extension name: for (int i = 0 ; i < (int)m_projectSettingsExtensions.size(); i++) { // Get the current extension: afProjectSettingsExtension* pCurrentExtension = m_projectSettingsExtensions[i]; GT_IF_WITH_ASSERT(pCurrentExtension != nullptr) { if (pCurrentExtension->ExtensionXMLString() == extensionName) { retVal = pCurrentExtension->SetSettingsFromXMLString(settingsAsXML); wasProjectFound = true; break; } } } return retVal; } // --------------------------------------------------------------------------- // Name: afProjectManager::restoreDefaultExtensionsProjectSettings // Description: Restore each of the extensions default settings // Return Val: bool - Success / failure. 
// Author: <NAME>
// Date: 11/4/2012
// ---------------------------------------------------------------------------
// Resets every registered extension back to its default project settings.
void afProjectManager::restoreDefaultExtensionsProjectSettings()
{
    // Restore each of the extensions settings:
    for (int i = 0 ; i < (int)m_projectSettingsExtensions.size(); i++)
    {
        // Get the current extension:
        afProjectSettingsExtension* pCurrentExtension = m_projectSettingsExtensions[i];
        GT_IF_WITH_ASSERT(pCurrentExtension != nullptr)
        {
            pCurrentExtension->RestoreDefaultProjectSettings();
        }
    }
}

// ---------------------------------------------------------------------------
// Name:        afProjectManager::setCurrentProject
// Description: Set the current project
// Arguments:   const apProjectSettings& projectSettings
// Author:      <NAME>
// Date:        8/4/2012
// ---------------------------------------------------------------------------
void afProjectManager::setCurrentProject(const apProjectSettings& projectSettings)
{
    // Get the application commands single instance:
    afApplicationCommands* pApplicationCommands = afApplicationCommands::instance();
    GT_IF_WITH_ASSERT(pApplicationCommands != nullptr)
    {
        // Check if this is another project:
        bool isProjectNameChanged = projectSettings.projectName() != m_currentProject.projectName();

        // Set the project settings:
        m_currentProject = projectSettings;

        // Set the current project file path:
        osFilePath newProjectFilePath;

        if (!m_currentProject.projectName().isEmpty())
        {
            // Build <user data folder>/<project name>.<project extension>:
            afGetUserDataFolderPath(newProjectFilePath);
            newProjectFilePath.setFileName(m_currentProject.projectName());
            newProjectFilePath.setFileExtension(AF_STR_projectFileExtension);

            // Set the current project:
            setCurrentProjectFilePath(newProjectFilePath);

            // If the project opened is a sample saved in CodeXL older version, fix the sample path to the new samples location
            FixSamplesPath();
        }
        else
        {
            m_currentProjectFilePath.clear();
        }

        // Throw an event if this is a new project:
        if (isProjectNameChanged)
        {
            // Create a global variable changed event:
            afGlobalVariableChangedEvent eve(afGlobalVariableChangedEvent::CURRENT_PROJECT);

            // Trigger variable change event:
            apEventsHandler::instance().handleDebugEvent(eve);
        }

        if (!projectSettings.projectName().isEmpty())
        {
            // Send an execution mode changed event from the manager since the project is new and still does not have last active settings:
            gtString lastSessionType = projectSettings.lastActiveSessionType();
            gtString modeName = projectSettings.lastActiveMode();

            // Fall back to the currently active mode when the project carries none:
            if (modeName.isEmpty())
            {
                modeName = afExecutionModeManager::instance().activeMode()->modeName();
                lastSessionType = afExecutionModeManager::instance().activeMode()->selectedSessionTypeName();
            }

            apExecutionModeChangedEvent executionModeEvent(modeName, lastSessionType);
            apEventsHandler::instance().registerPendingDebugEvent(executionModeEvent);
        }

        // If this is an empty project, restore to default settings in extensions:
        if (m_currentProject.projectName().isEmpty())
        {
            restoreDefaultExtensionsProjectSettings();
            afApplicationCommands::instance()->applicationTree()->clearTreeItems(false);
        }

        // Save the XML file:
        pApplicationCommands->OnFileSaveProject();
    }
}

// ---------------------------------------------------------------------------
// Name:        afProjectManager::setCurrentProjectFilePath
// Description: Set the current project file path
// Arguments:   const osFilePath& filePath
// Author:      <NAME>
// Date:        8/4/2012
// ---------------------------------------------------------------------------
void afProjectManager::setCurrentProjectFilePath(const osFilePath& filePath)
{
    // Set the file path:
    m_currentProjectFilePath = filePath;
}

// ---------------------------------------------------------------------------
// Name:        afProjectManager::setOriginalProjectFilePath
// Description: Set the original project file path
// Arguments:   const osFilePath& filePath
// Author:      <NAME>
// Date:        8/7/2012
// ---------------------------------------------------------------------------
void afProjectManager::setOriginalProjectFilePath(const osFilePath& filePath)
{
    // Set the file path:
    m_originalProjectFilePath = filePath;
}

// ---------------------------------------------------------------------------
// Name:        afProjectManager::UpdateRecentlyUsedProjects
// Description: Update the recently used project names
// Author:      <NAME>
// Date:        10/4/2012
// ---------------------------------------------------------------------------
bool afProjectManager::UpdateRecentlyUsedProjects()
{
    bool retVal = false;

    // Sanity check
    GT_IF_WITH_ASSERT(m_pRecentlyUsedProjectsManager != nullptr)
    {
        retVal = m_pRecentlyUsedProjectsManager->UpdateRecentlyUsedProjects();
    }

    return retVal;
}

// ---------------------------------------------------------------------------
// Name:        afProjectManager::areSettingsValid
// Description: Check if current settings are valid. Stops at the first
//              extension that reports invalid settings.
// Arguments:   gtString& invalidMessageStr - output: the failure message
//              gtString& invalidExtensionTreePath - output: tree path of the
//              first failing extension
// Return Val:  bool - Success / failure.
// Author:      <NAME>
// Date:        11/4/2012
// ---------------------------------------------------------------------------
bool afProjectManager::areSettingsValid(gtString& invalidMessageStr, gtString& invalidExtensionTreePath)
{
    bool retVal = true;

    // Check each of the extensions settings:
    for (int i = 0 ; i < (int)m_projectSettingsExtensions.size(); i++)
    {
        // Get the current extension:
        afProjectSettingsExtension* pCurrentExtension = m_projectSettingsExtensions[i];
        GT_IF_WITH_ASSERT(pCurrentExtension != nullptr)
        {
            retVal = pCurrentExtension->AreSettingsValid(invalidMessageStr);

            if (!retVal)
            {
                // Break on the first error:
                invalidExtensionTreePath = pCurrentExtension->ExtensionTreePathAsString();
                break;
            }
        }
    }

    return retVal;
}

// Asks every extension whether it stores data for the named project, and
// builds a ", "-separated list of the data type descriptions in
// typeOfProjectSavedData. Returns true if any extension reported data.
bool afProjectManager::DoesProjectContainData(const gtString& projectName, gtString& typeOfProjectSavedData)
{
    bool retVal = false;
    typeOfProjectSavedData.makeEmpty();

    // Query each of the extensions:
    for (int i = 0; i < (int)m_projectSettingsExtensions.size(); i++)
    {
        // Get the current extension:
        afProjectSettingsExtension* pCurrentExtension = m_projectSettingsExtensions[i];
        GT_IF_WITH_ASSERT(pCurrentExtension != nullptr)
        {
            gtString dataTypeStr;
            bool rc = pCurrentExtension->DoesProjectContainData(projectName, dataTypeStr);

            if (rc)
            {
                retVal = true;

                if (!typeOfProjectSavedData.isEmpty())
                {
                    // Append the last data type to the list of data types with a ","
                    typeOfProjectSavedData.append(AF_STR_Comma);
                    typeOfProjectSavedData.append(AF_STR_Space);
                }

                typeOfProjectSavedData.append(dataTypeStr);
            }
        }
    }

    return retVal;
}

// ---------------------------------------------------------------------------
// Name:        afProjectManager::restoreCurrentExtensionSettings
// Description: Restores each of the extensions' GUI to reflect the current
//              project settings
// Return Val:  bool - Success / failure (true only if every extension succeeded).
// Author:      <NAME>
// Date:        4/6/2012
// ---------------------------------------------------------------------------
bool afProjectManager::restoreCurrentExtensionSettings()
{
    bool retVal = true;

    // Restore each of the extensions settings:
    for (int i = 0 ; i < (int)m_projectSettingsExtensions.size(); i++)
    {
        // Get the current extension:
        afProjectSettingsExtension* pCurrentExtension = m_projectSettingsExtensions[i];
        GT_IF_WITH_ASSERT(pCurrentExtension != nullptr)
        {
            bool rc = pCurrentExtension->RestoreCurrentSettings();
            retVal = retVal && rc;
        }
    }

    return retVal;
}

// Relays an executable-path change to Qt signal listeners.
void afProjectManager::EmitExecutableChanged(const QString& exeName, bool isChangeFinal, bool isUserModelId)
{
    emit ExecutableChanged(exeName, isChangeFinal, isUserModelId);
}

// Relays a local/remote session toggle to Qt signal listeners.
void afProjectManager::emitGuiChangeRequiredForRemoteSession(bool isRemoteSession)
{
    // Emit the signal.
    emit OnRemoteHostCheckBoxCheckChange(isRemoteSession);
}

// Builds "<project>@<host>" for a remote host; local host names stay bare.
QString afProjectManager::GetProjectNameWithRemoteHost(const QString& origProjectName, const QString& hostID)
{
    // Get the project name with no host:
    QString retVal = GetProjectNameWithLocalHost(origProjectName);

    if (hostID != AF_STR_modeToolbarHostLocal)
    {
        // Append the requested host to the project name:
        if (!hostID.contains(AF_STR_Shtrudel))
        {
            retVal.append(AF_STR_Shtrudel);
        }

        retVal.append(hostID);
    }

    return retVal;
}

// Strips any "@<host>" suffix from the project name.
QString afProjectManager::GetProjectNameWithLocalHost(const QString& origProjectName)
{
    QString retVal = origProjectName;

    // Find '@' in project name:
    int shtrudelPos = origProjectName.indexOf(AF_STR_Shtrudel);

    if (shtrudelPos >= 0)
    {
        // Chop the host ID:
        retVal.chop(retVal.length() - shtrudelPos);
    }

    return retVal;
}

// Extracts the host portion of a "<project>@<host>" name. The returned string
// starts at the '@' itself (the '@' is included); projects with no '@' map to
// the local-host constant. NOTE(review): confirm callers expect the leading '@'.
QString afProjectManager::GetHostFromProjectName(const QString& projectName)
{
    QString retVal = projectName;

    // Find '@' in project name:
    int shtrudelPos = projectName.indexOf(AF_STR_Shtrudel);

    if (shtrudelPos >= 0)
    {
        // Get the string from the shtrudel ahead:
        retVal = retVal.mid(shtrudelPos);
    }
    else
    {
        retVal = AF_STR_modeToolbarHostLocal;
    }

    return retVal;
}

// If the project's executable no longer exists and is recognized as one of the
// bundled samples (currently only the Teapot sample), offers to retarget the
// executable, working directory and source directories to the installed
// samples location, then re-saves the project.
void afProjectManager::FixSamplesPath()
{
    // If the exe name is set, and doesn't exist, we want to fix it, in case of one of our samples
    if (!m_currentProject.executablePath().isEmpty() && !m_currentProject.executablePath().exists())
    {
        // Try to see if the exe name is one of our samples exe names
        afCodeXLSampleID currentSampleID = AF_SAMPLE_NONE;
        gtString exeName;
        m_currentProject.executablePath().getFileName(exeName);

        if (exeName.find(AF_STR_CodeXLTeapotExampleBinaryName) >= 0)
        {
            currentSampleID = AF_TEAPOT_SAMPLE;
        }

        if (currentSampleID != AF_SAMPLE_NONE)
        {
            // Ask the user permission to fix the paths
            int userAnswer = acMessageBox::instance().question(afGlobalVariablesManager::instance().ProductNameA(), AF_STR_OldSampleProjectQuestion, QMessageBox::Yes | QMessageBox::No);

            if (userAnswer == QMessageBox::Yes)
            {
                // Get the sample properties
                gtString sampleName, sampleMode, sampleSessionType, sampleDirName, sampleBinaryName, sampleProjectName, buildOptions;
                afApplicationCommands::instance()->GetSampleProperties(currentSampleID, sampleName, sampleMode, sampleSessionType, sampleDirName, sampleBinaryName, sampleProjectName, buildOptions);

                // Build the exe path, working folder and source code directories
                osFilePath samplePath;
                bool rc = samplePath.SetInstallRelatedPath(osFilePath::OS_CODEXL_EXAMPLES_PATH, false);
                GT_IF_WITH_ASSERT(rc)
                {
                    // Find the exe path and res path (in Linux it is different then windows and the string const reflects that):
                    samplePath.appendSubDirectory(sampleDirName);
                    samplePath.appendSubDirectory(AF_STR_CODEXLExampleReleaseDirName);

                    osDirectory clFilesDirectory;
                    osFilePath clFilesPath = samplePath;
                    clFilesPath.appendSubDirectory(AF_STR_CodeXLSampleResourcesDirName);

                    samplePath.setFileName(sampleBinaryName);
                    samplePath.setFileExtension(AF_STR_CodeXLSampleBinaryExtension);

                    // Set the exe path
                    m_currentProject.setExecutablePath(samplePath);
                    m_currentProject.setWorkDirectoryFromString(samplePath.fileDirectoryAsString());
                    m_currentProject.SetSourceFilesDirectories(clFilesPath.fileDirectoryAsString());
                }

                // Build the KA extension strings, with the builds options, compiler type and cl files list:

                // Save the project after fixing the paths
                afApplicationCommands::instance()->OnFileSaveProject();
            }
        }
    }
}
7,636
348
<gh_stars>100-1000 {"nom":"Delle","circ":"1ère circonscription","dpt":"Territoire de Belfort","inscrits":3825,"abs":2258,"votants":1567,"blancs":24,"nuls":11,"exp":1532,"res":[{"nuance":"MDM","nom":"<NAME>","voix":385},{"nuance":"FN","nom":"<NAME>","voix":311},{"nuance":"LR","nom":"<NAME>","voix":309},{"nuance":"SOC","nom":"M. <NAME>","voix":212},{"nuance":"FI","nom":"Mme <NAME>","voix":209},{"nuance":"COM","nom":"Mme <NAME>","voix":40},{"nuance":"EXG","nom":"Mme <NAME>","voix":23},{"nuance":"DIV","nom":"M. <NAME>","voix":21},{"nuance":"DIV","nom":"M. <NAME>","voix":12},{"nuance":"DIV","nom":"M. <NAME>","voix":10},{"nuance":"DIV","nom":"M. <NAME>","voix":0}]}
266
8,865
// This file is used to check if we can produce working executables // for i386 and x86_64 archs on Linux. #include <stdlib.h> int main(){}
44
419
#pragma once //------------------------------------------------------------------------- #if KRG_DLL #ifdef KRG_TOOLS_CORE #define KRG_TOOLS_CORE_API __declspec(dllexport) #else #define KRG_TOOLS_CORE_API __declspec(dllimport) #endif #else #define KRG_TOOLS_CORE_API #endif
139
1,007
/* * Copyright (C) 2021 ByteDance Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <dlfcn.h> #include <errno.h> #include <libgen.h> #include <stdio.h> #include <string.h> #include <stdlib.h> #include <fcntl.h> #include <unistd.h> #include <string> #include <thread> #define LOG_TAG "Rhea.ATrace" #include <utils/debug.h> #include <utils/build.h> #include <utils/fs.h> #include <utils/threads.h> #include "atrace.h" #include "hook/hook_bridge.h" #include "trace.h" #include "trace_provider.h" #include "recoder.h" namespace bytedance { namespace atrace { namespace { // Magic FD to simply write to tracer logger and bypassing real write constexpr int kTracerMagicFd = -100; constexpr ssize_t kAtraceMessageLen = 1024; constexpr char kAtraceHeader[] = "TRACE:\n# tracer: nop\n"; } ATrace& ATrace::Get() { static ATrace kInstance; return kInstance; } ATrace::ATrace() = default; ATrace::~ATrace() = default; int32_t ATrace::StartTrace() { int64_t start = elapsedRealtimeMicros(); main_thread_only_ = TraceProvider::Get().isMainThreadOnly(); if (atrace_started_) { ALOGW("atrace has been started."); return OK; } if (!PostCreateTrace(TraceProvider::Get().GetBufferSize())) { return START_WRITE_TRACE_FAILED; } int32_t result = InstallProbe(); if (result != OK) { ALOGE("failed to install rhea-trace, errno:%d", result); return result; } if (!first_start_trace_) { // On every start, except the first one, find if new libs were loaded // and install systrace hook for them 
HookBridge::Get().HookLoadedLibs(); } first_start_trace_ = false; auto prev = atrace_enabled_tags_->exchange(DEFAULT_ATRACE_TAG); if (prev != UINT64_MAX) { original_tags_ = prev; } atrace_started_ = true; ATRACE_BEGIN(("monotonic_time: " + std::to_string(systemTime(SYSTEM_TIME_MONOTONIC) / 1000000000.0)).c_str()); int64_t cost_us = elapsedRealtimeMicros() - start; ALOGD("start trace cost us: %lld", cost_us); return OK; } int32_t ATrace::StopTrace() { int64_t start = elapsedRealtimeMicros(); if (!atrace_started_) { ALOGE("please start trace firstly"); return OK; } uint64_t tags = original_tags_; if (tags != UINT64_MAX) { atrace_enabled_tags_->store(tags); } // if (!HookBridge::Get().UnhookLoadedLibs()) { // ALOGE("failed to unhook loaded libs"); // return UNHOOK_FAILED; // } ALOGD("log atrace cost us: %llu", log_trace_cost_us_); log_trace_cost_us_ = 0; PostFinishTrace(); atrace_started_ = false; int64_t cost_us = elapsedRealtimeMicros() - start; ALOGD("stop trace cost us: %lld", cost_us); return OK; } bool ATrace::IsATrace(int fd, size_t count) { return (atrace_marker_fd_ != nullptr && fd == *atrace_marker_fd_ && count > 0); } void ATrace::LogTrace(const void *buf, size_t count) { if (!atrace_started_) { return; } #define PRINT_LOG_TIME 0 #if PRINT_LOG_TIME int64_t start = elapsedRealtimeMicros(); #endif const char *msg = (const char*)buf; switch (msg[0]) { case 'B': { // begin synchronous event. format: "B|<pid>|<name>" break; } case 'E': { // end synchronous event. format: "E" break; } // the following events we don't currently log. case 'S': // start async event. format: "S|<pid>|<name>|<cookie>" case 'F': // finish async event. format: "F|<pid>|<name>|<cookie>" case 'C': // counter. 
format: "C|<pid>|<name>|<value>" default: return; } ssize_t len; char tmp_buf[kAtraceMessageLen] = {0}; if (main_thread_only_) { len = FillTimestampAndTid(tmp_buf, 0); } else { len = FillTimestampAndTid(tmp_buf, gettid()); } if ((len + count + 1) < kAtraceMessageLen) { memcpy(tmp_buf + len, msg, count); len += count; strcpy(tmp_buf + len, "\n"); len += 1; } else { ALOGE("atrace message is too long, total count is %ld", len + count + 1); return; } auto& logger = Logger::get(); logger.writeBytes( EntryType::STRING_NAME, 0, (const uint8_t*)tmp_buf, std::min(len, kAtraceMessageLen)); #if PRINT_LOG_TIME int64_t cost_us = elapsedRealtimeMicros() - start; ALOGE("log trace cost us: %lld", cost_us); log_trace_cost_us_ += cost_us; #endif } // Private functions int32_t ATrace::InstallProbe() { if (atrace_probe_installed_) { return OK; } if (!HookBridge::Get().HookLoadedLibs()) { ALOGE("failed to hook loaded libs"); return HOOK_FAILED; } int32_t result = InstallAtraceProbe(); if (result != OK) { ALOGE("failed to install atrace, errno:%d", result); return result; } atrace_probe_installed_ = true; return true; } /** * Copyright 2004-present, Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ int32_t ATrace::InstallAtraceProbe() { void *handle = nullptr; auto sdk = utils::Build::getAndroidSdk(); { std::string lib_name("libcutils.so"); std::string enabled_tags_sym("atrace_enabled_tags"); std::string marker_fd_sym("atrace_marker_fd"); if (sdk < 18) { lib_name = "libutils.so"; // android::Tracer::sEnabledTags enabled_tags_sym = "_ZN7android6Tracer12sEnabledTagsE"; // android::Tracer::sTraceFD marker_fd_sym = "_ZN7android6Tracer8sTraceFDE"; } if (sdk < 21) { handle = dlopen(lib_name.c_str(), RTLD_LOCAL); } else { handle = dlopen(nullptr, RTLD_GLOBAL); } // safe check the handle if (handle == nullptr) { ALOGE("'atrace_handle' is null"); return INSTALL_ATRACE_FAILED; } atrace_enabled_tags_ = reinterpret_cast<std::atomic<uint64_t> *>( dlsym(handle, enabled_tags_sym.c_str())); if (atrace_enabled_tags_ == nullptr) { ALOGE("'atrace_enabled_tags' is not defined"); dlclose(handle); return INSTALL_ATRACE_FAILED; } atrace_marker_fd_ = reinterpret_cast<int*>( dlsym(handle, marker_fd_sym.c_str())); if (atrace_marker_fd_ == nullptr) { ALOGE("'atrace_marker_fd' is not defined"); dlclose(handle); return INSTALL_ATRACE_FAILED; } if (*atrace_marker_fd_ == -1) { // This is a case that can happen for older Android version i.e. 4.4 // in which scenario the marker fd is not initialized/opened by Zygote. // Nevertheless for Profilo trace it is not necessary to have an open fd, // since all we really need is to ensure that we 'know' it is marker // fd to continue writing Profilo logs, thus the usage of marker fd // acting really as a placeholder for magic id. *atrace_marker_fd_ = kTracerMagicFd; } } dlclose(handle); return OK; } } // namespace atrace } // namespace bytedance
3,008
2,180
<filename>kafka-manager-common/src/main/java/com/xiaojukeji/kafka/manager/common/entity/vo/normal/consumer/ConsumerGroupDetailVO.java<gh_stars>1000+
package com.xiaojukeji.kafka.manager.common.entity.vo.normal.consumer;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

/**
 * Per-partition consumption detail of one consumer group on one topic
 * (value object returned by the REST layer; fields documented via Swagger
 * annotations below).
 *
 * @author zengqiao
 * @date 19/4/3
 */
@ApiModel(value = "消费组的消费详情")
public class ConsumerGroupDetailVO {
    @ApiModelProperty(value = "topic名称")
    private String topicName;

    @ApiModelProperty(value = "消费组名称")
    private String consumerGroup;

    @ApiModelProperty(value = "location")
    private String location;

    @ApiModelProperty(value = "分区Id")
    private Integer partitionId;

    @ApiModelProperty(value = "clientId")
    private String clientId;

    @ApiModelProperty(value = "消费偏移量")
    private Long consumeOffset;

    @ApiModelProperty(value = "partitionOffset")
    private Long partitionOffset;

    @ApiModelProperty(value = "lag")
    private Long lag;

    // Plain getters/setters only below — no behavior.

    public String getTopicName() {
        return topicName;
    }

    public void setTopicName(String topicName) {
        this.topicName = topicName;
    }

    public String getConsumerGroup() {
        return consumerGroup;
    }

    public void setConsumerGroup(String consumerGroup) {
        this.consumerGroup = consumerGroup;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    public Integer getPartitionId() {
        return partitionId;
    }

    public void setPartitionId(Integer partitionId) {
        this.partitionId = partitionId;
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public Long getConsumeOffset() {
        return consumeOffset;
    }

    public void setConsumeOffset(Long consumeOffset) {
        this.consumeOffset = consumeOffset;
    }

    public Long getPartitionOffset() {
        return partitionOffset;
    }

    public void setPartitionOffset(Long partitionOffset) {
        this.partitionOffset = partitionOffset;
    }

    public Long getLag() {
        return lag;
    }

    public void setLag(Long lag) {
        this.lag = lag;
    }

    @Override
    public String toString() {
        return "ConsumerGroupDetailVO{" +
                "topicName='" + topicName + '\'' +
                ", consumerGroup='" + consumerGroup + '\'' +
                ", location='" + location + '\'' +
                ", partitionId=" + partitionId +
                ", clientId='" + clientId + '\'' +
                ", consumeOffset=" + consumeOffset +
                ", partitionOffset=" + partitionOffset +
                ", lag=" + lag +
                '}';
    }
}
1,173
982
from logging import warning

import numpy as np
import pandas as pd

from aif360.datasets import StructuredDataset
from sklearn.preprocessing import MinMaxScaler


class RegressionDataset(StructuredDataset):
    """Base class for regression datasets."""

    def __init__(self, df, dep_var_name, protected_attribute_names,
                 privileged_classes, instance_weights_name='',
                 categorical_features=[], na_values=[],
                 custom_preprocessing=None, metadata=None):
        """
        Subclasses of RegressionDataset should perform the following before
        calling `super().__init__`:

            1. Load the dataframe from a raw file.

        Then, this class will go through a standard preprocessing routine which:

            2. (optional) Performs some dataset-specific preprocessing (e.g.
               renaming columns/values, handling missing data).

            3. Drops rows with NA values.

            4. Creates a one-hot encoding of the categorical variables.

            5. Maps protected attributes to binary privileged/unprivileged
               values (1/0).

            6. Normalizes df values

        Args:
            df (pandas.DataFrame): DataFrame on which to perform standard
                processing.
            dep_var_name: Name of the dependent variable column in `df`.
            protected_attribute_names (list): List of names corresponding to
                protected attribute columns in `df`.
            privileged_classes (list(list or function)): Each element is
                a list of values which are considered privileged or a boolean
                function which return `True` if privileged for the
                corresponding column in `protected_attribute_names`. All others
                are unprivileged. Values are mapped to 1 (privileged) and 0
                (unprivileged) if they are not already numerical.
            instance_weights_name (optional): Name of the instance weights
                column in `df`.
            categorical_features (optional, list): List of column names in the
                DataFrame which are to be expanded into one-hot vectors.
            na_values (optional): Additional strings to recognize as NA. See
                :func:`pandas.read_csv` for details.
                NOTE(review): not referenced in this method's body — presumably
                consumed by subclasses when reading the raw file; confirm.
            custom_preprocessing (function): A function object which
                acts on and returns a DataFrame (f: DataFrame -> DataFrame). If
                `None`, no extra preprocessing is applied.
            metadata (optional): Additional metadata to append.
        """
        # NOTE(review): `categorical_features=[]` / `na_values=[]` are mutable
        # default arguments; safe here only because they are never mutated.

        # 2. Perform dataset-specific preprocessing
        if custom_preprocessing:
            df = custom_preprocessing(df)

        # 3. Remove any rows that have missing data.
        dropped = df.dropna()
        count = df.shape[0] - dropped.shape[0]
        if count > 0:
            warning("Missing Data: {} rows removed from {}.".format(count,
                type(self).__name__))
        df = dropped

        # 4. Create a one-hot encoding of the categorical variables.
        df = pd.get_dummies(df, columns=categorical_features, prefix_sep='=')

        # 5. Map protected attributes to privileged/unprivileged
        privileged_protected_attributes = []
        unprivileged_protected_attributes = []
        for attr, vals in zip(protected_attribute_names, privileged_classes):
            privileged_values = [1.]
            unprivileged_values = [0.]
            if callable(vals):
                # Boolean function: apply it directly to remap the column.
                df[attr] = df[attr].apply(vals)
            elif np.issubdtype(df[attr].dtype, np.number):
                # this attribute is numeric; no remapping needed
                privileged_values = vals
                unprivileged_values = list(set(df[attr]).difference(vals))
            else:
                # find all instances which match any of the attribute values
                priv = np.logical_or.reduce(np.equal.outer(vals,
                    df[attr].to_numpy()))
                df.loc[priv, attr] = privileged_values[0]
                df.loc[~priv, attr] = unprivileged_values[0]

            privileged_protected_attributes.append(
                np.array(privileged_values, dtype=np.float64))
            unprivileged_protected_attributes.append(
                np.array(unprivileged_values, dtype=np.float64))

        # 6. Normalize df values (min-max scaling to [0, 1], column-wise)
        df = pd.DataFrame(MinMaxScaler().fit_transform(df.values),
                          columns=list(df), index=df.index)

        super(RegressionDataset, self).__init__(df=df,
            label_names=[dep_var_name],
            protected_attribute_names=protected_attribute_names,
            privileged_protected_attributes=privileged_protected_attributes,
            unprivileged_protected_attributes=unprivileged_protected_attributes,
            instance_weights_name=instance_weights_name,
            scores_names=[],
            metadata=metadata)
2,075
922
<gh_stars>100-1000
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from logging import warning  # noqa: F401  (kept if present in original tree)

from sparseml.base import Framework
from sparseml.sparsification import sparsification_info as base_sparsification_info
from sparseml.tensorflow_v1.sparsification import sparsification_info


def test_sparsification_info():
    # The framework-specific helper must agree with the generic dispatcher
    # when asked about the same framework.
    base_info = base_sparsification_info(Framework.tensorflow_v1)
    info = sparsification_info()

    assert base_info == info
    # No modifiers are registered for tensorflow_v1 yet.
    assert len(info.modifiers) == 0

    # TODO: update once this is available
304
2,813
package org.jabref.logic.pdf;

import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.util.Objects;

import org.jabref.architecture.AllowedToUseAwt;

import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSFloat;
import org.apache.pdfbox.cos.COSInteger;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.text.PDFTextStripperByArea;

/**
 * Extracts the text of marked annotations using bounding boxes.
 */
@AllowedToUseAwt("org.apache.pdfbox.text.PDFTextStripperByArea.addRegion uses AWT's Rectangle to indicate a region")
public final class TextExtractor {

    private final COSArray boundingBoxes;
    private final PDPage page;

    /**
     * @param page the page the annotation is on, must not be null
     * @param boundingBoxes the raw annotation, must not be null
     */
    public TextExtractor(PDPage page, COSArray boundingBoxes) {
        this.page = Objects.requireNonNull(page);
        this.boundingBoxes = Objects.requireNonNull(boundingBoxes);
    }

    /**
     * Extracts the text of a marked annotation such as highlights, underlines, strikeouts etc.
     *
     * @return The text of the annotation
     * @throws IOException If the PDFTextStripperByArea fails to initialize.
     */
    public String extractMarkedText() throws IOException {
        // Text has to be extracted by the rectangle calculated from the marking
        PDFTextStripperByArea stripperByArea = new PDFTextStripperByArea();
        // Accumulate in a StringBuilder instead of String.concat: repeated
        // concat in a loop copies the whole string on every iteration (O(n^2)).
        StringBuilder markedText = new StringBuilder();

        // Iterates over the array of segments. Each segment consists of 8 points forming a bounding box.
        int totalSegments = boundingBoxes.size() / 8;
        for (int currentSegment = 1, segmentPointer = 0; currentSegment <= totalSegments; currentSegment++, segmentPointer += 8) {
            try {
                stripperByArea.addRegion("markedRegion", calculateSegmentBoundingBox(boundingBoxes, segmentPointer));
                stripperByArea.extractRegions(page);
                markedText.append(stripperByArea.getTextForRegion("markedRegion"));
            } catch (IllegalArgumentException e) {
                throw new IOException("Cannot read annotation coordinates!", e);
            }
        }

        return markedText.toString().trim();
    }

    /**
     * Converts one 8-value quad-point segment into an AWT rectangle in page
     * coordinates (PDF y-axis flipped to top-left origin).
     */
    private Rectangle2D calculateSegmentBoundingBox(COSArray quadsArray, int segmentPointer) {
        // Extract coordinate values
        float upperLeftX = toFloat(quadsArray.get(segmentPointer));
        float upperLeftY = toFloat(quadsArray.get(segmentPointer + 1));
        float upperRightX = toFloat(quadsArray.get(segmentPointer + 2));
        float upperRightY = toFloat(quadsArray.get(segmentPointer + 3));
        float lowerLeftX = toFloat(quadsArray.get(segmentPointer + 4));
        float lowerLeftY = toFloat(quadsArray.get(segmentPointer + 5));

        // Post-processing of the raw coordinates.
        PDRectangle pageSize = page.getMediaBox();
        float ulx = upperLeftX - 1; // It is magic.
        float uly = pageSize.getHeight() - upperLeftY;
        float width = upperRightX - lowerLeftX;
        float height = upperRightY - lowerLeftY;

        return new Rectangle2D.Float(ulx, uly, width, height);
    }

    /**
     * Narrows a COS numeric object to float.
     *
     * @throws IllegalArgumentException if the object is neither COSFloat nor COSInteger
     */
    private float toFloat(Object cosNumber) {
        if (cosNumber instanceof COSFloat) {
            return ((COSFloat) cosNumber).floatValue();
        }
        if (cosNumber instanceof COSInteger) {
            return ((COSInteger) cosNumber).floatValue();
        }
        throw new IllegalArgumentException("The number type of the annotation is not supported!");
    }
}
1,391
9,734
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #pragma once #include <memory> #include <mutex> #include <unordered_map> #include <utility> #include "gandiva/configuration.h" namespace gandiva { class ConfigHolder { public: static int64_t MapInsert(std::shared_ptr<Configuration> config) { g_mtx_.lock(); int64_t result = config_id_++; configuration_map_.insert( std::pair<int64_t, std::shared_ptr<Configuration>>(result, config)); g_mtx_.unlock(); return result; } static void MapErase(int64_t config_id_) { g_mtx_.lock(); configuration_map_.erase(config_id_); g_mtx_.unlock(); } static std::shared_ptr<Configuration> MapLookup(int64_t config_id_) { std::shared_ptr<Configuration> result = nullptr; try { result = configuration_map_.at(config_id_); } catch (const std::out_of_range&) { } return result; } private: // map of configuration objects created so far static std::unordered_map<int64_t, std::shared_ptr<Configuration>> configuration_map_; static std::mutex g_mtx_; // atomic counter for projector module ids static int64_t config_id_; }; } // namespace gandiva
616
1,062
<reponame>larkov/MailTrackerBlocker // // Generated by class-dump 3.5b1 (64 bit) (Debug version compiled Dec 3 2019 19:59:57). // // Copyright (C) 1997-2019 <NAME>. // #import <MailFW/MFAccount.h> #import <MailFW/EDReceivingAccount-Protocol.h> #import <MailFW/MCActivityTarget-Protocol.h> #import <MailFW/MCMailAccount-Protocol.h> #import <MailFW/MFMessageDelivererDelegate-Protocol.h> #import <MailFW/NSFileManagerDelegate-Protocol.h> @class ACAccount, ECAuthScheme, MCTaskManager, MFDeliveryAccount, MFMailbox, NSArray, NSDate, NSError, NSNumber, NSObject, NSOperationQueue, NSString, NSURL; @protocol OS_dispatch_queue, OS_dispatch_source; @interface MFMailAccount : MFAccount <EDReceivingAccount, MCActivityTarget, MCMailAccount, MFMessageDelivererDelegate, NSFileManagerDelegate> { id _mailAccountLock; // 8 = 0x8 id _mailboxLock; // 16 = 0x10 MFMailbox *_inboxMailbox; // 24 = 0x18 MFMailbox *_rootMailbox; // 32 = 0x20 MFMailbox *_draftsMailbox; // 40 = 0x28 MFMailbox *_sentMessagesMailbox; // 48 = 0x30 MFMailbox *_trashMailbox; // 56 = 0x38 MFMailbox *_junkMailbox; // 64 = 0x40 MFMailbox *_notesMailbox; // 72 = 0x48 MFMailbox *_todosMailbox; // 80 = 0x50 MFMailbox *_outboxMailbox; // 88 = 0x58 MFMailbox *_archiveMailbox; // 96 = 0x60 NSError *_connectionError; // 104 = 0x68 id _cacheChangeLock; // 112 = 0x70 NSObject<OS_dispatch_queue> *_cacheWriteQueue; // 120 = 0x78 NSObject<OS_dispatch_source> *_cacheWriteTimer; // 128 = 0x80 id _backgroundActivityFlagsLock; // 136 = 0x88 BOOL _cacheHasBeenRead; // 144 = 0x90 BOOL _mailboxListInitializationInProgress; // 145 = 0x91 BOOL _needsChecking; // 146 = 0x92 BOOL _isInitializingmailboxList; // 147 = 0x93 BOOL _usesMailboxCache; // 148 = 0x94 NSURL *_accountDirectory; // 152 = 0x98 MCTaskManager *_taskManager; // 160 = 0xa0 NSOperationQueue *_backgroundWorkQueue; // 168 = 0xa8 } + (id)csAccountTypeString; // IMP=0x000000000015bf27 + (id)keyPathsForValuesAffectingURLPersistenceHostname; // IMP=0x000000000015b90c + 
(id)_mailboxNameForPathComponent:(id)arg1; // IMP=0x0000000000156436 + (id)_pathComponentForMailboxName:(id)arg1; // IMP=0x0000000000156421 + (void)_postMailAccountsHaveChanged; // IMP=0x0000000000156274 + (id)_accountWithFileSystemPath:(id)arg1 relativePath:(id *)arg2; // IMP=0x0000000000155873 + (void)_enableMailboxListingNotifications:(BOOL)arg1; // IMP=0x00000000001556ab + (void)_disableMailboxListingNotifications; // IMP=0x000000000015561d + (BOOL)_mailboxListingNotificationAreEnabled; // IMP=0x0000000000155592 + (BOOL)_mailAccountsAreInitialized; // IMP=0x0000000000155585 + (void)_normalizePathComponentsInMailboxesDictionary:(id)arg1 accountClass:(Class)arg2; // IMP=0x00000000001553d8 + (void)normalizeMailboxPathComponentsInAllAccounts; // IMP=0x0000000000154df5 + (void)resetCachedAccountPaths; // IMP=0x0000000000154ab8 + (id)mailboxForURL:(id)arg1 forceCreation:(BOOL)arg2 syncableURL:(BOOL)arg3; // IMP=0x00000000001546ca + (id)accountWithURLString:(id)arg1 includeInactiveAccounts:(BOOL)arg2; // IMP=0x0000000000154631 + (id)accountWithURLString:(id)arg1; // IMP=0x000000000015461d + (id)URLForInfo:(id)arg1; // IMP=0x000000000015444f + (id)infoForURL:(id)arg1; // IMP=0x00000000001543a1 + (id)_accountForURL:(id)arg1; // IMP=0x000000000015438d + (id)accountWithSyncableURLString:(id)arg1 includeInactiveAccounts:(BOOL)arg2; // IMP=0x0000000000154193 + (id)_accountForURL:(id)arg1 includeInactiveAccounts:(BOOL)arg2; // IMP=0x0000000000153c42 + (void)_inferMissingCanonicalEmailAddressesForAccounts:(id)arg1 usingEmailAddresses:(id)arg2; // IMP=0x0000000000151ecd + (void)inferMissingCanonicalEmailAddresses; // IMP=0x0000000000151b70 + (id)mailboxForFileSystemPath:(id)arg1 create:(BOOL)arg2; // IMP=0x000000000014eadc + (void)resetAllSpecialMailboxes; // IMP=0x000000000014e954 + (void)synchronouslyEmptyMailboxType:(int)arg1 inAccounts:(id)arg2; // IMP=0x000000000014dfee + (BOOL)allAccountsDeleteInPlace; // IMP=0x000000000014dce1 + (long long)numberOfDaysToKeepLocalTrash; 
// IMP=0x000000000014d8c6 + (id)accountsInitializingMailboxList; // IMP=0x000000000014c9cd + (void)connectAllAccounts; // IMP=0x000000000014c17a + (void)disconnectAllAccounts; // IMP=0x000000000014c166 + (void)_setOnlineStateOfAllAccountsTo:(BOOL)arg1; // IMP=0x000000000014bf8e + (BOOL)isAnyAccountOnline; // IMP=0x000000000014bded + (BOOL)isAnyAccountOffline; // IMP=0x000000000014bc4c + (void)refreshAllEmailAliases; // IMP=0x000000000014ae18 + (id)defaultDeliveryAccount; // IMP=0x00000000001481b7 + (id)accountDirectoryForSystemAccount:(id)arg1; // IMP=0x0000000000147dc2 + (id)newAccountWithSystemAccount:(id)arg1; // IMP=0x00000000001471c0 + (id)accountWithPath:(id)arg1; // IMP=0x00000000001470e9 + (id)allMailboxes; // IMP=0x0000000000146e4f + (id)archiveMailboxes; // IMP=0x0000000000146de5 + (id)junkMailboxes; // IMP=0x0000000000146db5 + (id)draftMailboxes; // IMP=0x0000000000146d85 + (id)sentMessagesMailboxes; // IMP=0x0000000000146d55 + (id)outboxMailboxes; // IMP=0x0000000000146d25 + (id)trashMailboxes; // IMP=0x0000000000146cf5 + (id)inboxMailboxes; // IMP=0x0000000000146cc7 + (id)_specialMailboxesUsingBlock:(id)arg1; // IMP=0x0000000000146abd + (void)setOrderedActiveMailAccounts:(id)arg1; // IMP=0x0000000000146a18 + (id)orderedActiveMailAccounts; // IMP=0x0000000000146647 + (id)specialMailboxes; // IMP=0x00000000001463af + (id)outboxMessageStore:(BOOL)arg1; // IMP=0x0000000000146325 + (id)accountThatReceivedMessage:(id)arg1 matchingEmailAddress:(id *)arg2 fullUserName:(id *)arg3; // IMP=0x0000000000145c93 + (id)preferredEmailAddressToReplyToMessage:(id)arg1; // IMP=0x0000000000145a5f + (id)addressesThatReceivedMessage:(id)arg1; // IMP=0x00000000001456ab + (id)accountForHeaders:(id)arg1 message:(id)arg2; // IMP=0x00000000001455a5 + (id)accountContainingEmailAddress:(id)arg1; // IMP=0x000000000014558e + (id)_accountContainingEmailAddress:(id)arg1 matchingAddress:(id *)arg2 fullUserName:(id *)arg3; // IMP=0x0000000000144e36 + 
(BOOL)isEmailAddressInAnyAccount:(id)arg1; // IMP=0x000000000014493a + (id)allEmailAddressesIncludingDisplayName:(BOOL)arg1; // IMP=0x000000000014433d + (id)accountWithParentIdentifier:(id)arg1; // IMP=0x000000000014414b + (id)_accountFromArray:(id)arg1 withIdentifier:(id)arg2; // IMP=0x0000000000143f3b + (id)accountWithIdentifier:(id)arg1; // IMP=0x0000000000143d99 + (void)migrateUnreadCountInMailboxCache; // IMP=0x00000000001435a8 + (void)saveAccountInfoToDefaults; // IMP=0x00000000001434d7 + (id)remoteAccounts; // IMP=0x00000000001433f5 + (BOOL)onlyGmailAccountsUsingAllMailAsArchiveAreActive; // IMP=0x00000000001431a8 + (id)activeAccounts; // IMP=0x0000000000143149 + (id)_activeAccountsFromArray:(id)arg1; // IMP=0x0000000000143012 + (void)_removeAccountFromSortedPaths:(id)arg1; // IMP=0x0000000000142e2a + (void)_setMailAccounts:(id)arg1 reloadingFromStorage:(BOOL)arg2; // IMP=0x0000000000142723 + (void)setMailAccounts:(id)arg1; // IMP=0x000000000014270f + (id)mailAccounts; // IMP=0x0000000000142679 + (id)_systemAccountsWithTypeIdentifiers:(id)arg1 accountStore:(id)arg2; // IMP=0x00000000001425b0 + (id)_newAccountsAndExistingAccounts:(id *)arg1 forAccountTypeIdentifiers:(id)arg2; // IMP=0x0000000000142274 + (void)reloadMailAccountsUsesMailboxCache:(BOOL)arg1; // IMP=0x0000000000141eed + (void)reloadMailAccounts; // IMP=0x0000000000141ed6 + (BOOL)discoverSettingsForIncompleteAccounts; // IMP=0x0000000000141bfb + (void)_setupSortedPathsForAccounts:(id)arg1; // IMP=0x0000000000141a8c + (void)_addAccountToSortedPaths:(id)arg1; // IMP=0x0000000000141891 + (void)completeDeferredAccountInitialization; // IMP=0x00000000001416dd + (BOOL)haveAccountsBeenConfigured; // IMP=0x000000000014140d + (BOOL)accountsHaveBeenInitialized; // IMP=0x00000000001413fa + (void)initialize; // IMP=0x0000000000141322 + (id)accountFetchLog; // IMP=0x00000000001412c5 @property(readonly, nonatomic) NSOperationQueue *backgroundWorkQueue; // @synthesize backgroundWorkQueue=_backgroundWorkQueue; 
@property(nonatomic) BOOL usesMailboxCache; // @synthesize usesMailboxCache=_usesMailboxCache; @property(readonly, nonatomic) MCTaskManager *taskManager; // @synthesize taskManager=_taskManager; @property(readonly) BOOL isInitializingmailboxList; // @synthesize isInitializingmailboxList=_isInitializingmailboxList; @property(readonly, nonatomic) NSURL *accountDirectory; // @synthesize accountDirectory=_accountDirectory; // - (void).cxx_destruct; // IMP=0x000000000015c017 @property(readonly) BOOL sourceIsManaged; @property(readonly, nonatomic, getter=isManaged) BOOL managed; @property(readonly) NSArray *emailAddresses; - (id)uniqueID; // IMP=0x000000000015bf15 - (id)rootMailboxEvenIfInactive:(BOOL)arg1; // IMP=0x000000000015be04 - (id)_mailboxPathPrefix:(BOOL)arg1; // IMP=0x000000000015bde5 - (id)_URLForInfo:(id)arg1; // IMP=0x000000000015b93e @property(readonly, copy) NSString *URLPersistenceHostname; @property(readonly, copy, nonatomic) NSString *URLPersistenceScheme; - (id)_infoForMatchingURL:(id)arg1; // IMP=0x000000000015b79a @property(readonly, nonatomic) BOOL shouldLogDeleteActivity; - (BOOL)_canEmptyMessagesFromMailbox:(id)arg1; // IMP=0x000000000015b71b - (id)_specialMailboxWithType:(int)arg1 create:(BOOL)arg2; // IMP=0x000000000015b4d4 - (void)_setSpecialMailboxRelativePath:(id)arg1 forType:(int)arg2; // IMP=0x000000000015b36c - (id)_specialMailboxRelativePathForType:(int)arg1; // IMP=0x000000000015b22b - (void)_setSpecialMailbox:(id)arg1 forType:(int)arg2; // IMP=0x000000000015b160 - (BOOL)_assignSpecialMailboxToAppropriateIvar:(id)arg1 forType:(int)arg2; // IMP=0x000000000015ac9c - (id)_defaultSpecialMailboxRelativePathForType:(int)arg1; // IMP=0x000000000015ab90 - (void)_clearAllPathBasedCachesAndDelete:(BOOL)arg1; // IMP=0x000000000015a932 - (void)_emptySpecialMailboxesIfNeededForQuit:(BOOL)arg1; // IMP=0x000000000015a559 - (void)_emptySpecialMailboxesIfNeeded; // IMP=0x000000000015a545 - (BOOL)_deleteMailbox:(id)arg1 reflectToServer:(BOOL)arg2; // 
IMP=0x000000000015a4ab - (BOOL)_setChildren:(id)arg1 addedChild:(id)arg2 forMailbox:(id)arg3; // IMP=0x000000000015a25d - (BOOL)_setChildren:(id)arg1 forMailbox:(id)arg2; // IMP=0x000000000015a20e - (id)_insertMailbox:(id)arg1 intoParent:(id)arg2 withDisplayName:(id)arg3; // IMP=0x000000000015966c - (void)_writeMailboxCache; // IMP=0x00000000001592f0 - (id)_dictionaryForMailbox:(id)arg1; // IMP=0x0000000000158dbe - (void)_loadEntriesFromFileSystemPath:(id)arg1 parent:(id)arg2; // IMP=0x0000000000157fc9 - (void)_synchronizeMailboxListWithFileSystemBeforeImport; // IMP=0x0000000000157fb2 - (void)_synchronizeMailboxListWithFileSystem; // IMP=0x0000000000157f9e - (void)_synchronizeMailboxListWithFileSystemBeforeImport:(BOOL)arg1; // IMP=0x0000000000157b4d - (BOOL)_readMailboxCache; // IMP=0x00000000001577bb - (void)_mailboxesWereRemovedFromTree:(id)arg1 withFileSystemPaths:(id)arg2; // IMP=0x0000000000157746 - (void)_loadMailboxListingIntoCache:(id)arg1 parent:(id)arg2 addedMailboxes:(id)arg3 removedMailboxes:(id)arg4 hasAllMailMailbox:(char *)arg5; // IMP=0x00000000001566ca - (void)_synchronouslyLoadListingForParent:(id)arg1; // IMP=0x00000000001566c4 - (id)_copyMailboxWithParent:(id)arg1 name:(id)arg2 pathComponent:(id)arg3 attributes:(unsigned long long)arg4 existingMailbox:(id)arg5; // IMP=0x000000000015652f - (void)_writeCustomInfoToMailboxCache:(id)arg1; // IMP=0x00000000001564b3 - (void)_readCustomInfoFromMailboxCache:(id)arg1; // IMP=0x000000000015644b - (void)_postMailAccountsHaveChangedIfNeeded; // IMP=0x00000000001561f8 - (void)setisEnabled:(BOOL)arg1; // IMP=0x0000000000155f9d - (void)_configureMailboxCacheEvenIfInactive:(BOOL)arg1; // IMP=0x0000000000155e49 - (void)messageDeliveryDidFinish:(id)arg1; // IMP=0x0000000000154b3b - (BOOL)discoverSettings; // IMP=0x0000000000154ab0 @property(readonly) BOOL needsToDiscoverSettings; - (void)respondToHostBecomingReachable; // IMP=0x0000000000154a54 @property(readonly, nonatomic) BOOL storesUnseenCount; 
@property(readonly, nonatomic) BOOL supportsAppleScript; @property(readonly, nonatomic) BOOL hasTrashMailbox; @property(readonly, nonatomic) BOOL isRemoteAccount; @property(readonly, nonatomic) BOOL isZeroConfiguration; @property(readonly, nonatomic) BOOL providesAccountInformation; @property(readonly, nonatomic) BOOL canParticipateInRules; @property(readonly, nonatomic) BOOL isEditableByUser; @property(readonly, nonatomic) BOOL synchronizesDataWithServer; @property(readonly, copy, nonatomic) NSString *syncableURLString; @property(readonly, copy, nonatomic) NSString *URLString; - (id)objectSpecifier; // IMP=0x0000000000153b88 - (id)objectSpecifierForMailbox:(id)arg1; // IMP=0x00000000001539e6 - (id)objectSpecifierForMessageStore:(id)arg1; // IMP=0x0000000000153984 - (id)valueInMailboxesWithName:(id)arg1; // IMP=0x0000000000153692 - (id)mailboxForRelativePath:(id)arg1 isFilesystemPath:(BOOL)arg2 create:(BOOL)arg3; // IMP=0x0000000000152e9e - (id)storeForMailbox:(id)arg1 createIfNeeded:(BOOL)arg2; // IMP=0x0000000000152c33 - (id)storeForMailbox:(id)arg1; // IMP=0x0000000000152ba6 @property(retain) NSError *connectionError; - (void)accountInfoDidChange; // IMP=0x0000000000151722 @property long long portNumber; @property(copy) NSString *hostname; - (void)setUsername:(id)arg1; // IMP=0x0000000000151426 - (void)_resetAllMailboxURLs; // IMP=0x00000000001510af - (void)invalidateChildrenOfMailbox:(id)arg1; // IMP=0x0000000000151098 - (BOOL)deleteMailbox:(id)arg1 reflectToServer:(BOOL)arg2; // IMP=0x0000000000150526 - (BOOL)renameMailbox:(id)arg1 newDisplayName:(id)arg2 parent:(id)arg3; // IMP=0x000000000014f557 - (id)createMailboxWithParent:(id)arg1 displayName:(id)arg2 localizedDisplayName:(id)arg3; // IMP=0x000000000014f4e7 - (id)createMailboxWithParent:(id)arg1 name:(id)arg2; // IMP=0x000000000014f48c @property(readonly, nonatomic) BOOL supportsNormalContainerOnlyMailboxes; - (BOOL)canMailboxBeDeleted:(id)arg1; // IMP=0x000000000014f3ef - 
(BOOL)canMailboxBeRenamed:(id)arg1; // IMP=0x000000000014f32b - (id)validNameForMailbox:(id)arg1 fromDisplayName:(id)arg2 error:(id *)arg3; // IMP=0x000000000014ef47 @property(readonly, nonatomic) BOOL supportsSlashesInMailboxName; @property(readonly, nonatomic) BOOL canMoveMailboxes; @property(readonly, nonatomic) BOOL canCreateNewMailboxes; @property(readonly, copy, nonatomic) NSString *mailboxPathExtension; - (BOOL)resetSpecialMailboxes; // IMP=0x000000000014e3c6 @property(readonly, nonatomic) BOOL containsMailboxes; - (id)displayNameForMailbox:(id)arg1; // IMP=0x000000000014e301 @property(readonly, copy, nonatomic) NSString *displayName; - (void)emptySpecialMailboxesThatNeedToBeEmptiedAtQuit; // IMP=0x000000000014df20 @property(readonly, nonatomic) BOOL canMoveDeletedMessagesToTrash; @property BOOL shouldMoveDeletedMessagesToTrash; @property(nonatomic) long long emptyTrashFrequency; @property(nonatomic) long long emptyJunkFrequency; - (void)setEmptySentMessagesFrequency:(long long)arg1; // IMP=0x000000000014d830 - (long long)emptySentMessagesFrequency; // IMP=0x000000000014d823 - (long long)_emptyFrequencyForMailDataclassProperty:(id)arg1 defaultValue:(long long)arg2; // IMP=0x000000000014d74b - (void)deleteMessagesFromMailbox:(id)arg1 olderThanNumberOfDays:(long long)arg2; // IMP=0x000000000014d4fa - (void)setToDosMailbox:(id)arg1; // IMP=0x000000000014d4e3 - (void)setArchiveMailbox:(id)arg1; // IMP=0x000000000014d4cc @property(retain, nonatomic) MFMailbox *notesMailbox; - (void)setSentMessagesMailbox:(id)arg1; // IMP=0x000000000014d49e - (void)setJunkMailbox:(id)arg1; // IMP=0x000000000014d487 - (void)setTrashMailbox:(id)arg1; // IMP=0x000000000014d470 - (void)setDraftsMailbox:(id)arg1; // IMP=0x000000000014d459 - (id)allMailboxesEvenIfInactive:(BOOL)arg1; // IMP=0x000000000014d282 @property(readonly, copy, nonatomic) NSArray *allMailboxes; - (BOOL)containsMailboxWithURL:(id)arg1; // IMP=0x000000000014d1c0 - (id)mailboxForType:(long long)arg1; // 
IMP=0x000000000014d12a - (id)_outboxMailboxCreateIfNeeded:(BOOL)arg1; // IMP=0x000000000014d111 - (id)archiveMailboxCreateIfNeeded:(BOOL)arg1; // IMP=0x000000000014d0f8 - (id)trashMailboxCreateIfNeeded:(BOOL)arg1; // IMP=0x000000000014d0a9 - (id)sentMessagesMailboxCreateIfNeeded:(BOOL)arg1; // IMP=0x000000000014d090 - (id)junkMailboxCreateIfNeeded:(BOOL)arg1; // IMP=0x000000000014d077 - (id)draftsMailboxCreateIfNeeded:(BOOL)arg1; // IMP=0x000000000014d05e - (id)inboxMailboxCreateIfNeeded:(BOOL)arg1; // IMP=0x000000000014cfbb @property(readonly) BOOL shouldArchiveByDefault; @property(readonly) BOOL isLocalAccount; @property(readonly, nonatomic) BOOL rootChildrenCanBePromoted; @property(readonly) BOOL rootMailboxExists; - (BOOL)mailboxIsRootMailbox:(id)arg1; // IMP=0x000000000014ceda @property(readonly, copy) MFMailbox *rootMailbox; @property(readonly) MFMailbox *primaryMailbox; @property(readonly, nonatomic) BOOL hasSyncActivity; @property(readonly, nonatomic) BOOL isFetching; - (void)stopAccountActivity; // IMP=0x000000000014ce4a - (void)fetchAsynchronouslyIsAuto:(BOOL)arg1; // IMP=0x000000000014ce44 - (BOOL)shouldFetchIsAuto:(BOOL)arg1; // IMP=0x000000000014cdd3 @property BOOL needsChecking; - (void)didInitializeMailboxList; // IMP=0x000000000014cc63 - (void)willInitializeMailboxList; // IMP=0x000000000014cc06 - (BOOL)isInitializingMailboxList; // IMP=0x000000000014cba8 - (BOOL)_supportsMailboxListInitialization; // IMP=0x000000000014cba0 @property(readonly, nonatomic) BOOL canBeSynchronized; @property(readonly, nonatomic) BOOL canAppendMessages; - (void)synchronizeWithFilesystem; // IMP=0x000000000014c8d8 @property(readonly, nonatomic) NSOperationQueue *remoteFetchQueue; @property(readonly, nonatomic) NSOperationQueue *remoteTaskQueue; @property(readonly, nonatomic) BOOL canFetch; - (void)setIsWillingToGoOnline:(BOOL)arg1; // IMP=0x000000000014c85d - (void)setIsOffline:(BOOL)arg1; // IMP=0x000000000014c6fd - (void)invalidateAllStores; // IMP=0x000000000014c53d - 
(void)doRoutineCleanup; // IMP=0x000000000014c4a5 - (void)_scheduleCacheWriteIfNeeded; // IMP=0x000000000014c309 - (void)_cancelCacheWriteTimer; // IMP=0x000000000014c284 - (void)saveCacheImmediately:(BOOL)arg1; // IMP=0x000000000014c191 - (void)deleteAccount; // IMP=0x000000000014ba07 - (void)_synchronouslyInvalidateAndDelete:(BOOL)arg1; // IMP=0x000000000014b5cc - (BOOL)fileManager:(id)arg1 shouldProceedAfterError:(id)arg2 removingItemAtURL:(id)arg3; // IMP=0x000000000014b580 - (BOOL)fileManager:(id)arg1 shouldProceedAfterError:(id)arg2 removingItemAtPath:(id)arg3; // IMP=0x000000000014b534 - (void)promptUserForDeletionInWindow:(id)arg1 completion:(id)arg2; // IMP=0x000000000014b4ac - (void)setShouldAutoFetch:(BOOL)arg1; // IMP=0x000000000014b4a6 - (BOOL)shouldAutoFetch; // IMP=0x000000000014b49b - (void)setParentAccountDefaultAlias:(id)arg1; // IMP=0x000000000014a588 - (BOOL)getDefaultEmailAliasDisplayName:(id *)arg1 emailAddress:(id *)arg2; // IMP=0x0000000000149f79 @property(readonly) NSURL *emailAliasesEditingURL; @property(copy) NSArray *emailAliases; - (void)setApplescriptEmailAddresses:(id)arg1; // IMP=0x000000000014936b - (id)applescriptEmailAddresses; // IMP=0x0000000000149321 @property(copy) NSArray *emailAddressStrings; @property(readonly, copy) NSArray *rawEmailAddresses; @property(readonly, copy) NSString *firstEmailAddress; @property BOOL deliveryAccountIsLocked; - (void)_mailboxUserInfoDidChange:(id)arg1; // IMP=0x00000000001488d6 @property(retain, nonatomic) NSNumber *useMailDrop; - (void)_deliveryAccountWillBeRemoved:(id)arg1; // IMP=0x0000000000148756 @property(readonly, nonatomic) MFDeliveryAccount *dynamicDeliveryAccount; @property(retain, nonatomic) MFDeliveryAccount *deliveryAccount; @property(copy) NSString *smtpIdentifier; @property(copy) NSString *fullUserName; - (void)setApplescriptFullUserName:(id)arg1; // IMP=0x0000000000147fbc - (id)applescriptFullUserName; // IMP=0x0000000000147f70 - (id)path; // IMP=0x0000000000147d72 
@property(readonly, copy, nonatomic) NSString *statisticsKind; - (BOOL)_performBlock:(id)arg1 forMessagesFetchedWithBlock:(id)arg2 withBatchSize:(unsigned long long)arg3; // IMP=0x000000000014772e - (void)completeDeferredInitialization; // IMP=0x0000000000147728 - (void)dealloc; // IMP=0x0000000000147689 - (id)initWithSystemAccount:(id)arg1; // IMP=0x00000000001473b7 - (id)_newTaskManager; // IMP=0x00000000001471b8 @property(readonly) MFMailbox *allMailMailbox; @property(readonly, nonatomic) MFMailbox *todosMailbox; - (BOOL)mailboxIsEmpty:(id)arg1; // IMP=0x0000000000146194 - (BOOL)hasEmailAddress:(id)arg1; // IMP=0x0000000000144aba @property(readonly, copy) NSString *backupID; - (void)setEmptyJunkOnQuit:(BOOL)arg1; // IMP=0x000000000014129f - (BOOL)emptyJunkOnQuit; // IMP=0x0000000000141282 - (void)setEmptySentMessagesOnQuit:(BOOL)arg1; // IMP=0x000000000014127c - (BOOL)emptySentMessagesOnQuit; // IMP=0x0000000000141274 - (void)setEmptyTrashOnQuit:(BOOL)arg1; // IMP=0x000000000014124e - (BOOL)emptyTrashOnQuit; // IMP=0x0000000000141231 - (void)insertInMailboxes:(id)arg1; // IMP=0x000000000014121d - (void)insertInMailboxes:(id)arg1 atIndex:(unsigned long long)arg2; // IMP=0x000000000014117c @property(readonly, copy, nonatomic) NSArray *mailboxes; - (id)indicesOfObjectsByEvaluatingObjectSpecifier:(id)arg1; // IMP=0x0000000000140e58 // Remaining properties @property(readonly, copy) NSString *accountTypeString; @property BOOL allowInsecureAuthentication; @property(readonly, copy) NSString *appleAuthenticationToken; @property(readonly, copy) NSString *applePersonID; @property(copy) NSString *authenticationScheme; @property(copy) NSString *canonicalEmailAddress; @property(readonly, copy) NSString *clientInfo; @property BOOL configureDynamically; @property(readonly, copy) NSString *debugDescription; @property(readonly, copy) NSString *description; @property(readonly, copy, nonatomic) NSDate *expiryDate; @property(copy) NSString *externalHostname; @property(readonly) 
unsigned long long hash; @property(readonly, copy) NSString *identifier; @property(readonly, nonatomic) BOOL isGmailAccount; @property(readonly) BOOL isManaged; @property(readonly) BOOL isYahooAccount; @property(readonly, copy) NSString *machineID; @property(readonly, copy) NSString *oauthToken; @property(readonly, copy) NSString *oneTimePassword; @property(copy) NSString *password; @property(retain) ECAuthScheme *preferredAuthScheme; @property(readonly) BOOL primaryiCloudAccount; @property(readonly, nonatomic) BOOL requiresAuthentication; @property(readonly, copy, nonatomic) NSString *saslProfileName; @property long long securityLayerType; @property BOOL shouldUseAuthentication; @property(readonly, copy, nonatomic) NSArray *standardPorts; @property(readonly, copy, nonatomic) NSArray *standardSSLPorts; @property(readonly) Class superclass; @property(readonly, copy) ACAccount *systemAccount; @property(readonly, copy) NSString *username; @property BOOL usesSSL; @end
8,763
375
package org.archive.wayback.resourceindex;

import java.io.IOException;
import java.util.List;

import org.archive.format.gzip.zipnum.TimestampBestPickDedupIterator;
import org.archive.format.gzip.zipnum.ZipNumCluster;
import org.archive.format.gzip.zipnum.ZipNumParams;
import org.archive.util.iterator.CloseableIterator;
import org.archive.wayback.core.CaptureSearchResult;
import org.archive.wayback.exception.ResourceIndexNotAvailableException;
import org.archive.wayback.resourceindex.cdx.format.CDXFlexFormat;
import org.archive.wayback.util.AdaptedIterator;
import org.archive.wayback.util.Adapter;

/**
 * SearchResultSource wrapper around the archive-commons CDX input sources
 * (ZipNumCluster and CDX file). Raw CDX lines coming out of the cluster are
 * turned into CaptureSearchResult objects by this class's own Adapter
 * implementation.
 *
 * @author ilya
 */
public class ZipNumClusterSearchResultSource implements SearchResultSource, Adapter<String, CaptureSearchResult> {

    protected ZipNumCluster cluster;

    protected ZipNumParams params = null;

    // Capped to a single block; used for "one-block" queries (url key
    // containing a space).
    protected ZipNumParams oneBlockParams;

    // When > 0, consecutive captures sharing the same timestamp prefix of
    // this length are deduped, keeping the best pick.
    protected int timestampDedupLength = 0;

    // Url-key prefixes whose captures should be marked robot-ignored.
    protected List<String> ignoreRobotPaths;

    /**
     * Prepares the single-block parameter set. The cluster itself is injected
     * via {@link #setCluster(ZipNumCluster)}.
     */
    public void init() throws IOException {
        //this.cluster = new ZipNumCluster(clusterUri, summaryFile, blockLoader);
        oneBlockParams = new ZipNumParams();
        oneBlockParams.setMaxBlocks(1);
    }

    /**
     * Returns an iterator of captures for the given url key. Three query
     * shapes are supported: a key containing a space (one-block query), a key
     * ending in "*\t" (prefix match), and anything else (exact match).
     */
    @Override
    public CloseableIterator<CaptureSearchResult> getPrefixIterator(String urlkey)
            throws ResourceIndexNotAvailableException {
        try {
            CloseableIterator<String> lines;

            final int spaceAt = urlkey.indexOf(' ');

            if (spaceAt >= 0) {
                // One-block query: only the text before the space is the key.
                lines = cluster.getCDXIterator(urlkey, urlkey.substring(0, spaceAt), true, oneBlockParams);
            } else if (urlkey.endsWith("*\t")) {
                // Prefix match: strip the trailing "*\t" wildcard marker.
                lines = cluster.getCDXIterator(urlkey, urlkey.substring(0, urlkey.length() - 2), false, params);
            } else {
                // Exact match.
                lines = cluster.getCDXIterator(urlkey, urlkey, true, params);
            }

            if (timestampDedupLength > 0) {
                lines = new TimestampBestPickDedupIterator(lines, timestampDedupLength);
            }

            return new AdaptedIterator<String, CaptureSearchResult>(lines, this);
        } catch (IOException e) {
            throw new ResourceIndexNotAvailableException(e.toString());
        }
    }

    /** Reverse iteration is not supported by this source. */
    @Override
    public CloseableIterator<CaptureSearchResult> getPrefixReverseIterator(String prefix)
            throws ResourceIndexNotAvailableException {
        throw new ResourceIndexNotAvailableException("Unsupported");
    }

    /** Closes the supplied iterator. */
    @Override
    public void cleanup(CloseableIterator<CaptureSearchResult> c) throws IOException {
        c.close();
    }

    /** Nothing to release; the cluster is owned elsewhere. */
    @Override
    public void shutdown() throws IOException {
    }

    public ZipNumCluster getCluster() {
        return cluster;
    }

    public void setCluster(ZipNumCluster cluster) {
        this.cluster = cluster;
    }

    public ZipNumParams getParams() {
        return params;
    }

    public void setParams(ZipNumParams params) {
        this.params = params;
    }

    /**
     * Parses one raw CDX line into a CaptureSearchResult, flagging it as
     * robot-ignored when its url key starts with any configured prefix.
     */
    @Override
    public CaptureSearchResult adapt(String line) {
        CaptureSearchResult capture = CDXFlexFormat.parseCDXLineFlexFast(line);

        if (ignoreRobotPaths != null) {
            for (String robotPath : ignoreRobotPaths) {
                if (capture.getUrlKey().startsWith(robotPath)) {
                    capture.setRobotIgnore();
                    break;
                }
            }
        }

        return capture;
    }

    public List<String> getIgnoreRobotPaths() {
        return ignoreRobotPaths;
    }

    public void setIgnoreRobotPaths(List<String> ignoreRobotPaths) {
        this.ignoreRobotPaths = ignoreRobotPaths;
    }

    public int getTimestampDedupLength() {
        return timestampDedupLength;
    }

    public void setTimestampDedupLength(int timestampDedupLength) {
        this.timestampDedupLength = timestampDedupLength;
    }
}
1,322
1,768
// Copyright (c) 2012 <NAME>. All rights reserved.
package tlc2.tool.fp;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.rmi.RemoteException;

import org.junit.Test;

import tlc2.tool.TLCState;
import tlc2.tool.TLCTrace;
import tlc2.tool.queue.DummyTLCState;

/**
 * Shared constructor and recovery tests for heap-based {@link DiskFPSet}
 * implementations. Concrete subclasses supply the implementation under test
 * via {@link #getDiskFPSet(FPSetConfiguration)} and its size limits via
 * {@link #getLowerLimit()} / {@link #getUpperLimit()}.
 */
public abstract class AbstractHeapBasedDiskFPSetTest {

	/* Test the lower limits */

	@Test
	public void testCtorLLMinus1() throws RemoteException {
		doTest(getLowerLimit() - 1);
	}

	@Test
	public void testCtorLL() throws RemoteException {
		doTest(getLowerLimit());
	}

	@Test
	public void testCtorLLPlus1() throws RemoteException {
		doTest(getLowerLimit() + 1);
	}

	@Test
	public void testCtorLLNextPow2Min1() throws RemoteException {
		doTest((getLowerLimit() << 1) - 1);
	}

	/* Test with a power far away from upper/lower limits */

	@Test
	public void testCtorPow16Minus1() throws RemoteException {
		doTest((1L << 16) - 1);
	}

	@Test
	public void testCtorPow16() throws RemoteException {
		doTest(1L << 16);
	}

	@Test
	public void testCtorPow16Plus1() throws RemoteException {
		doTest((1L << 16) + 1);
	}

	@Test
	public void testCtorPow16NextPow2Min1() throws RemoteException {
		doTest(((1L << 16) << 1) - 1);
	}

	/* Test the upper limits */

	@Test
	public void testCtorULMinus1() throws RemoteException {
		doTest(getUpperLimit() - 1);
	}

	@Test
	public void testCtorUL() throws RemoteException {
		doTest(getUpperLimit());
	}

	@Test
	public void testCtorULPlus1() throws RemoteException {
		doTest(getUpperLimit() + 1);
	}

	@Test
	public void testCtorULNextPow2Min1() throws RemoteException {
		doTest((getUpperLimit() << 1) - 1);
	}

	/**
	 * Writes a trace of consecutive fingerprints, checkpoints it, then checks
	 * that a fresh DiskFPSet recovers every fingerprint from that trace.
	 */
	@Test
	public void testFPSetRecovery() throws IOException {
		final int limit = 99999;
		final String metadir = System.getProperty("java.io.tmpdir");
		final String filename = this.getClass().getCanonicalName();

		// First, create a trace file to recover from.
		final TLCTrace trace = new TLCTrace(metadir, filename, null);

		// Fill the trace file with random fingerprints
		final TLCState predecessor = new DummyTLCState();
		predecessor.uid = 1L; // an init state
		trace.writeState(predecessor.uid);
		// successor states
		for (long fp = predecessor.uid + 1; fp < limit; fp++) {
			trace.writeState(predecessor, fp);
			predecessor.uid = fp;
		}

		// Create a checkpoint file
		trace.beginChkpt();
		trace.commitChkpt();

		// Create a DiskFPSet
		final DiskFPSet fpSet = getDiskFPSet(new FPSetConfiguration());
		fpSet.init(1, metadir, filename);
		fpSet.recover(trace);

		// Verify successful recovery
		assertEquals(limit - 1, fpSet.size());
		for (long fp = 1L; fp < limit; fp++) {
			assertTrue(fpSet.contains(fp));
		}
	}

	/**
	 * Recovers individual fingerprints into a set that has been forced to
	 * flush, exercising the disk-backed recovery path.
	 */
	@Test
	public void testFPSetRecovery2() throws IOException {
		final String metadir = System.getProperty("java.io.tmpdir");
		final String filename = this.getClass().getCanonicalName() + "testFPSetRecovery2";

		final DiskFPSet fpSet = getDiskFPSet(new FPSetConfiguration());
		fpSet.init(1, metadir, filename);

		// Make sure the FPSet tries to flush to disk.
		fpSet.forceFlush();

		for (long fp = 1; fp <= 1024; fp++) {
			fpSet.recoverFP(fp);
		}
	}

	/* Helper */

	/**
	 * Instantiates a DiskFPSet bounded by the given physical memory and checks
	 * that its internal table neither exceeds that memory nor collapses to
	 * zero capacity.
	 */
	@SuppressWarnings("deprecation")
	protected void doTest(final long physicalMemoryInBytes) throws RemoteException {
		final FPSetConfiguration fpSetConfig = new DummyFPSetConfiguration();
		fpSetConfig.setMemory(physicalMemoryInBytes);

		final DiskFPSet fpset = getDiskFPSet(fpSetConfig);

		// Convert from logical to physical
		long maxTblCntInBytes = fpset.getMaxTblCnt() * FPSet.LongSize;

		// Always allocate less storage than given
		assertTrue("Internal storage exceeds allocated memory",
				physicalMemoryInBytes >= maxTblCntInBytes);

		// Storage with zero space for a fingerprint does make much sense, does it?
		assertTrue("Internal storage underflow allocated memory",
				0L < maxTblCntInBytes);

		// We happen to know that LSBDiskFPSet reserves some memory to auxiliary
		// storage. Make this reservation the lower bound for the primary
		// storage. We div by 2 to account for the fact that both
		// implementations round down to the next power of 2.
		double lowerLimit = (physicalMemoryInBytes / 2) / fpset.getAuxiliaryStorageRequirement();
		assertTrue("Internal storage falls short lower allocation limit",
				lowerLimit <= maxTblCntInBytes);
	}

	protected abstract DiskFPSet getDiskFPSet(final FPSetConfiguration fpSetConfig) throws RemoteException;

	/**
	 * @return The lower size limit the {@link HeapBasedDiskFPSet} can handle.
	 *         It is determined by the implementation's
	 *         {@link DiskFPSet#getAuxiliaryStorageRequirement()}.
	 */
	protected abstract long getLowerLimit();

	/**
	 * @return The upper size limit the {@link HeapBasedDiskFPSet} can handle.
	 */
	protected abstract long getUpperLimit();
}
518
{ "name": "Active Campaign", "category": "Marketing & Analytics", "start_url": "http://www.activecampaign.com/login/", "icons": [ { "src": "https://cdn.filestackcontent.com/hjsKmsPR7mOFSvPQdzqr", "platform": "browserx" } ], "theme_color": "#255EAF", "scope": "http://www.activecampaign.com", "bx_legacy_service_id": "active-campaign", "bx_multi_instance_config": { "presets": [ "on-premise" ] } }
200
543
// automatically generated by the FlatBuffers compiler, do not modify
// NOTE(review): regenerate from the bnls schema with flatc instead of hand-editing.

package com.riiablo.net.packet.bnls;

import java.nio.*;
import java.lang.*;
import java.util.*;
import com.google.flatbuffers.*;

/**
 * Flatbuffer table for a BNLS "query realms" packet. The table has a single
 * field, {@code realms}, a vector of {@link com.riiablo.net.packet.bnls.Realm}
 * tables stored at vtable slot 0 (byte offset 4).
 */
@SuppressWarnings("unused")
public final class QueryRealms extends Table {
  public static void ValidateVersion() { Constants.FLATBUFFERS_1_12_0(); }
  // Root accessor: reads the root table offset from the start of the buffer.
  public static QueryRealms getRootAsQueryRealms(ByteBuffer _bb) { return getRootAsQueryRealms(_bb, new QueryRealms()); }
  public static QueryRealms getRootAsQueryRealms(ByteBuffer _bb, QueryRealms obj) { _bb.order(ByteOrder.LITTLE_ENDIAN); return (obj.__assign(_bb.getInt(_bb.position()) + _bb.position(), _bb)); }
  public void __init(int _i, ByteBuffer _bb) { __reset(_i, _bb); }
  public QueryRealms __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }

  // Element accessor: returns the j-th Realm, or null if the vector is absent.
  public com.riiablo.net.packet.bnls.Realm realms(int j) { return realms(new com.riiablo.net.packet.bnls.Realm(), j); }
  // Reuses `obj` to avoid allocating a wrapper per element; offset 4 is the 'realms' slot.
  public com.riiablo.net.packet.bnls.Realm realms(com.riiablo.net.packet.bnls.Realm obj, int j) { int o = __offset(4); return o != 0 ? obj.__assign(__indirect(__vector(o) + j * 4), bb) : null; }
  // Number of realms; 0 when the field is absent.
  public int realmsLength() { int o = __offset(4); return o != 0 ? __vector_len(o) : 0; }
  public com.riiablo.net.packet.bnls.Realm.Vector realmsVector() { return realmsVector(new com.riiablo.net.packet.bnls.Realm.Vector()); }
  public com.riiablo.net.packet.bnls.Realm.Vector realmsVector(com.riiablo.net.packet.bnls.Realm.Vector obj) { int o = __offset(4); return o != 0 ? obj.__assign(__vector(o), 4, bb) : null; }

  // Convenience builder: starts the table, adds the realms vector, ends it.
  public static int createQueryRealms(FlatBufferBuilder builder,
      int realmsOffset) {
    builder.startTable(1);
    QueryRealms.addRealms(builder, realmsOffset);
    return QueryRealms.endQueryRealms(builder);
  }

  public static void startQueryRealms(FlatBufferBuilder builder) { builder.startTable(1); }
  public static void addRealms(FlatBufferBuilder builder, int realmsOffset) { builder.addOffset(0, realmsOffset, 0); }
  // Vector of offsets: written back-to-front, as FlatBuffers requires.
  public static int createRealmsVector(FlatBufferBuilder builder, int[] data) { builder.startVector(4, data.length, 4); for (int i = data.length - 1; i >= 0; i--) builder.addOffset(data[i]); return builder.endVector(); }
  public static void startRealmsVector(FlatBufferBuilder builder, int numElems) { builder.startVector(4, numElems, 4); }
  public static int endQueryRealms(FlatBufferBuilder builder) {
    int o = builder.endTable();
    return o;
  }

  // Typed vector view over QueryRealms elements (generated helper).
  public static final class Vector extends BaseVector {
    public Vector __assign(int _vector, int _element_size, ByteBuffer _bb) { __reset(_vector, _element_size, _bb); return this; }

    public QueryRealms get(int j) { return get(new QueryRealms(), j); }
    public QueryRealms get(QueryRealms obj, int j) {  return obj.__assign(__indirect(__element(j), bb), bb); }
  }
}
968
679
<filename>main/slideshow/source/inc/shapemanager.hxx<gh_stars>100-1000
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

#ifndef INCLUDED_SLIDESHOW_SHAPEMANAGER_HXX
#define INCLUDED_SLIDESHOW_SHAPEMANAGER_HXX

#include "disposable.hxx"

#include <com/sun/star/uno/Reference.hxx>
// Pre-C++11 codebase: shared ownership via boost::shared_ptr throughout.
#include <boost/shared_ptr.hpp>

// Forward declaration only -- avoids pulling in the full UNO drawing headers.
namespace com { namespace sun { namespace star { namespace drawing
{
    class XShape;
} } } }

/* Definition of ShapeManager interface */

namespace slideshow
{
    namespace internal
    {
        // Forward declarations; the interface only traffics in shared_ptrs to these.
        class HyperlinkArea;
        class AnimatableShape;
        class Shape;

        /** ShapeManager interface

            Implementers of this interface manage appearance and
            animation of slideshow shapes.
         */
        class ShapeManager : public Disposable
        {
        public:
            /** Notify the ShapeManager that the given Shape starts an
                animation now.

                This method enters animation mode for the Shape. If
                the shape is already in animation mode, the call is
                counted, and the shape only leaves animation mode
                after a corresponding number of leaveAnimationMode()
                calls.
             */
            virtual void enterAnimationMode( const boost::shared_ptr<AnimatableShape>& rShape ) = 0;

            /** Notify the ShapeManager that the given Shape is no
                longer animated.

                When called a corresponding number of times as
                enterAnimationMode() for a given shape, this methods
                ends animation mode for the given Shape. It is illegal
                to call this method more often than
                enterAnimationMode().
             */
            virtual void leaveAnimationMode( const boost::shared_ptr<AnimatableShape>& rShape ) = 0;

            /** Notify that a shape needs an update

                This method notifies the ShapeManager that a shape
                update is necessary. Use this if e.g. a running
                animation changed the shape appearance.

                @param rShape
                Shape which needs an update
             */
            virtual void notifyShapeUpdate( const boost::shared_ptr<Shape>& rShape ) = 0;

            /** Lookup a Shape from an XShape model object

                This method looks up the internal shape map for one
                representing the given XShape.

                @param xShape
                The XShape object, for which the representing Shape
                should be looked up.
             */
            virtual boost::shared_ptr<Shape> lookupShape(
                ::com::sun::star::uno::Reference<
                   ::com::sun::star::drawing::XShape > const & xShape ) const = 0;

            /** Register given shape as a hyperlink target

                @param rArea
                Hyperlink sensitive area. Will participate in
                hyperlink region lookup. Must be in absolute user
                space coordinates.
             */
            virtual void addHyperlinkArea( const boost::shared_ptr<HyperlinkArea>& rArea ) = 0;

            /** Unregister given shape as a hyperlink target

                @param rArea
                Hyperlink sensitive area. Will cease to participate
                in hyperlink region lookup.
             */
            virtual void removeHyperlinkArea( const boost::shared_ptr<HyperlinkArea>& rArea ) = 0;
        };

        // Shared-ownership handle used by clients of the interface.
        typedef ::boost::shared_ptr< ShapeManager > ShapeManagerSharedPtr;
    }
}

#endif /* INCLUDED_SLIDESHOW_SHAPEMANAGER_HXX */
1,620
665
<reponame>opencirclesolutions/isis<filename>core/metamodel/src/main/java/org/apache/isis/core/metamodel/facets/ObjectTypeFacetFactory.java<gh_stars>100-1000
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package org.apache.isis.core.metamodel.facets;

import org.apache.isis.applib.annotation.Introspection.IntrospectionPolicy;
import org.apache.isis.commons.internal.exceptions._Exceptions;
import org.apache.isis.core.metamodel.facetapi.FacetHolder;

/**
 * Processes logicalTypeName and determines the effective IntrospectionPolicy.
 */
public interface ObjectTypeFacetFactory extends FacetFactory {

    /**
     * Context for {@link #process(ProcessObjectTypeContext)}.
     * <p>
     * Unlike other process contexts, the introspection policy is deliberately
     * unavailable here: this context is used before the policy has been
     * determined, so {@link #getIntrospectionPolicy()} always throws.
     */
    public static class ProcessObjectTypeContext
    extends AbstractProcessWithClsContext<FacetHolder> {

        public ProcessObjectTypeContext(
                final Class<?> cls,
                final FacetHolder facetHolder) {
            super(cls,
                    IntrospectionPolicy.ANNOTATION_OPTIONAL, // not used - but to satisfy constraints
                    facetHolder);
        }

        /**
         * Always throws {@code UnsupportedOperationException}: the policy is
         * not yet known at this stage of meta-model processing.
         */
        @Override
        public IntrospectionPolicy getIntrospectionPolicy() {
            throw _Exceptions.unsupportedOperation(
                    "ProcessObjectTypeContext does not support getIntrospectionPolicy() "
                    + "as the IntrospectionPolicy is not yet available this early "
                    + "in the meta-model processing stage.");
        }

    }

    /**
     * Processes the class held in the given context, typically installing
     * facets on its {@link FacetHolder}.
     */
    void process(ProcessObjectTypeContext processClassContext);

}
762
435
""" gizeh/__init__.py """ # __all__ = [] from .gizeh import * from .geometry import * from .version import __version__
45
4,283
<reponame>software-is-art/hazelcast
/*
 * Copyright 2021 Hazelcast Inc.
 *
 * Licensed under the Hazelcast Community License (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://hazelcast.com/hazelcast-community-license
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.jet.hadoop;

import com.hazelcast.function.BiFunctionEx;
import com.hazelcast.function.ConsumerEx;
import com.hazelcast.jet.Util;
import com.hazelcast.jet.core.Processor;
import com.hazelcast.jet.hadoop.impl.SerializableConfiguration;
import com.hazelcast.jet.pipeline.BatchSource;
import com.hazelcast.jet.pipeline.Sources;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;

import javax.annotation.Nonnull;
import java.util.Map.Entry;

import static com.hazelcast.jet.hadoop.HadoopProcessors.readHadoopP;

/**
 * Contains factory methods for Apache Hadoop sources.
 *
 * @since Jet 3.0
 */
public final class HadoopSources {

    /**
     * With the new HDFS API, some of the {@link RecordReader}s return the same
     * key/value instances for each record, for example {@link LineRecordReader}.
     * If this property is set to {@code true}, the source makes a copy of each
     * object after applying the {@code projectionFn}. For readers which create
     * a new instance for each record, the source can be configured to not copy
     * the objects for performance.
     * <p>
     * Also if you are using a projection function which doesn't refer to any
     * mutable state from the key or value, then it makes sense to set this
     * property to {@code false} to avoid unnecessary copying.
     * <p>
     * The source copies the objects by serializing and de-serializing them. The
     * objects should be either {@link Writable} or serializable in a way which
     * Jet can serialize/de-serialize.
     * <p>
     * Here is how you can configure the source. Default and always safe value is
     * {@code true}:
     *
     * <pre>{@code
     *     Configuration conf = new Configuration();
     *     conf.setBoolean(HadoopSources.COPY_ON_READ, false);
     *     BatchSource<Entry<K, V>> source = HadoopSources.inputFormat(conf);
     * }</pre>
     */
    public static final String COPY_ON_READ = "jet.source.copyonread";

    /**
     * When reading files from local file system using Hadoop, each processor
     * reads files from its own local file system. If the local file system
     * is shared between members, e.g NFS mounted filesystem, you should
     * configure this property as {@code true}.
     * <p>
     * Here is how you can configure the source. Default value is {@code false}:
     *
     * <pre>{@code
     *     Configuration conf = new Configuration();
     *     conf.setBoolean(HadoopSources.SHARED_LOCAL_FS, true);
     *     BatchSource<Entry<K, V>> source = HadoopSources.inputFormat(conf);
     * }</pre>
     *
     * @since Jet 4.4
     */
    public static final String SHARED_LOCAL_FS = "jet.source.sharedlocalfs";

    /**
     * NOTE(review): the original javadoc carried only the {@code @since} tag.
     * Judging by the property name, setting this to {@code true} presumably
     * makes the source tolerate files that disappear between listing and
     * reading -- confirm against the processor implementation before relying
     * on this description.
     *
     * @since Jet 4.4
     */
    public static final String IGNORE_FILE_NOT_FOUND = "jet.source.ignorefilenotfound";

    // Utility class; not instantiable.
    private HadoopSources() {
    }

    /**
     * Returns a source that reads records from Apache Hadoop HDFS and emits
     * the results of transforming each record (a key-value pair) with the
     * supplied projection function.
     * <p>
     * This source splits and balances the input data among Jet {@linkplain
     * Processor processors}, doing its best to achieve data locality. To this
     * end the Jet cluster topology should be aligned with Hadoop's &mdash; on
     * each Hadoop member there should be a Jet member.
     * <p>
     * The processor will use either the new or the old MapReduce API based on
     * the key which stores the {@code InputFormat} configuration. If it's
     * stored under {@value MRJobConfig#INPUT_FORMAT_CLASS_ATTR}, the new API
     * will be used. Otherwise, the old API will be used. If you get the
     * configuration from {@link Job#getConfiguration()}, the new API will be
     * used. Please see {@link #COPY_ON_READ} if you are using the new API.
     * <p>
     * The default local parallelism for this processor is 2 (or less if less CPUs
     * are available).
     * <p>
     * This source does not save any state to snapshot. If the job is restarted,
     * all entries will be emitted again.
     *
     * @param <K>           key type of the records
     * @param <V>           value type of the records
     * @param <E>           the type of the emitted value
     * @param configuration JobConf for reading files with the appropriate
     *                      input format and path
     * @param projectionFn  function to create output objects from key and value.
     *                      If the projection returns a {@code null} for an item, that item
     *                      will be filtered out
     */
    @Nonnull
    public static <K, V, E> BatchSource<E> inputFormat(
            @Nonnull Configuration configuration,
            @Nonnull BiFunctionEx<K, V, E> projectionFn
    ) {
        // Wrap the Configuration so it can travel with the serialized job DAG.
        return Sources.batchFromProcessor("hdfsSource",
                readHadoopP(SerializableConfiguration.asSerializable(configuration), projectionFn));
    }

    /**
     * Returns a source that reads records from Apache Hadoop HDFS and emits
     * the results of transforming each record (a key-value pair) with the
     * supplied projection function.
     * <p>
     * This source splits and balances the input data among Jet {@linkplain
     * Processor processors}, doing its best to achieve data locality. To this
     * end the Jet cluster topology should be aligned with Hadoop's &mdash; on
     * each Hadoop member there should be a Jet member.
     * <p>
     * The {@code configureFn} is used to configure the MR Job. The function is
     * run on the coordinator node of the Jet Job, avoiding contacting the server
     * from the machine where the job is submitted.
     * <p>
     * The new MapReduce API will be used.
     * <p>
     * The default local parallelism for this processor is 2 (or less if less CPUs
     * are available).
     * <p>
     * This source does not save any state to snapshot. If the job is restarted,
     * all entries will be emitted again.
     *
     * @param <K>          key type of the records
     * @param <V>          value type of the records
     * @param <E>          the type of the emitted value
     * @param configureFn  function to configure the MR job
     * @param projectionFn function to create output objects from key and value.
     *                     If the projection returns a {@code null} for an item, that item
     *                     will be filtered out
     */
    @Nonnull
    public static <K, V, E> BatchSource<E> inputFormat(
            @Nonnull ConsumerEx<Configuration> configureFn,
            @Nonnull BiFunctionEx<K, V, E> projectionFn
    ) {
        // Configuration is null here: it is built on the coordinator via configureFn.
        return Sources.batchFromProcessor("readHadoop",
                readHadoopP(null, configureFn, projectionFn));
    }

    /**
     * Convenience for {@link #inputFormat(Configuration, BiFunctionEx)}
     * with {@link java.util.Map.Entry} as its output type.
     */
    @Nonnull
    public static <K, V> BatchSource<Entry<K, V>> inputFormat(@Nonnull Configuration jobConf) {
        return inputFormat(jobConf, (BiFunctionEx<K, V, Entry<K, V>>) Util::entry);
    }
}
2,826
28,814
<reponame>tangoblast817/zxing
/*
 * Copyright (C) 2008 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.zxing.web.generator.client;

/**
 * A class containing a list of timezones, with their full names, and time
 * offset.
 * <p>
 * NOTE(review): the short IDs and names mirror the legacy JDK three-letter
 * zone table (e.g. "CAT" at GMT-1:00, "MET" as "Middle East Time" at
 * GMT+3:30), which differs from modern IANA zone usage -- confirm this is
 * intentional before "correcting" any entry, since callers may match on
 * these exact strings.
 *
 * @author <NAME>
 */
final class TimeZoneList {

  // Static data holder; never instantiated.
  private TimeZoneList() {
  }

  // Offsets are expressed in milliseconds.
  private static final long ONE_HOUR = 60L * 60 * 1000;
  private static final long THIRTY_MIN = 30L * 60 * 1000;

  // Trailing "// n" comments index every fifth entry for easy counting.
  static final TimeZoneInfo[] TIMEZONES = {
    new TimeZoneInfo("GMT", "Greenwich Mean Time", "GMT", 0), // 0
    new TimeZoneInfo("UTC", "Universal Coordinated Time", "GMT", 0),
    new TimeZoneInfo("ECT", "European Central Time", "GMT+1:00", ONE_HOUR),
    new TimeZoneInfo("EET", "Eastern European Time", "GMT+2:00", 2 * ONE_HOUR),
    new TimeZoneInfo("ART", "(Arabic) Egypt Standard Time", "GMT+2:00", 2 * ONE_HOUR),
    new TimeZoneInfo("EAT", "Eastern African Time", "GMT+3:00", 3 * ONE_HOUR), // 5
    new TimeZoneInfo("MET", "Middle East Time", "GMT+3:30", 3 * ONE_HOUR + THIRTY_MIN),
    new TimeZoneInfo("NET", "Near East Time", "GMT+4:00", 4 * ONE_HOUR),
    new TimeZoneInfo("PLT", "Pakistan Lahore Time", "GMT+5:00", 5 * ONE_HOUR),
    new TimeZoneInfo("IST", "India Standard Time", "GMT+5:30", 5 * ONE_HOUR + THIRTY_MIN),
    new TimeZoneInfo("BST", "Bangladesh Standard Time", "GMT+6:00", 6 * ONE_HOUR), // 10
    new TimeZoneInfo("VST", "Vietnam Standard Time", "GMT+7:00", 7 * ONE_HOUR),
    new TimeZoneInfo("CTT", "China Taiwan Time", "GMT+8:00", 8 * ONE_HOUR),
    new TimeZoneInfo("JST", "Japan Standard Time", "GMT+9:00", 9 * ONE_HOUR),
    new TimeZoneInfo("ACT", "Australia Central Time", "GMT+9:30", 9 * ONE_HOUR + THIRTY_MIN),
    new TimeZoneInfo("AET", "Australia Eastern Time", "GMT+10:00", 10 * ONE_HOUR), // 15
    new TimeZoneInfo("SST", "Solomon Standard Time", "GMT+11:00", 11 * ONE_HOUR),
    new TimeZoneInfo("NST", "New Zealand Standard Time", "GMT+12:00", 12 * ONE_HOUR),
    new TimeZoneInfo("MIT", "Midway Islands Time", "GMT-11:00", -11 * ONE_HOUR),
    new TimeZoneInfo("HST", "Hawaii Standard Time", "GMT-10:00", -10 * ONE_HOUR),
    new TimeZoneInfo("AST", "Alaska Standard Time", "GMT-9:00", -9 * ONE_HOUR), // 20
    new TimeZoneInfo("PST", "Pacific Standard Time", "GMT-8:00", -8 * ONE_HOUR),
    // PNT and MST deliberately share GMT-7:00.
    new TimeZoneInfo("PNT", "Phoenix Standard Time", "GMT-7:00", -7 * ONE_HOUR),
    new TimeZoneInfo("MST", "Mountain Standard Time", "GMT-7:00", -7 * ONE_HOUR),
    new TimeZoneInfo("CST", "Central Standard Time", "GMT-6:00", -6 * ONE_HOUR),
    new TimeZoneInfo("EST", "Eastern Standard Time", "GMT-5:00", -5 * ONE_HOUR), // 25
    new TimeZoneInfo("IET", "Indiana Eastern Standard Time", "GMT-5:00", -5 * ONE_HOUR),
    new TimeZoneInfo("PRT", "Puerto Rico and US Virgin Islands Time", "GMT-4:00", -4 * ONE_HOUR),
    new TimeZoneInfo("CNT", "Canada Newfoundland Time", "GMT-3:30", -3 * ONE_HOUR - THIRTY_MIN),
    new TimeZoneInfo("AGT", "Argentina Standard Time", "GMT-3:00", -3 * ONE_HOUR),
    new TimeZoneInfo("BET", "Brazil Eastern Time", "GMT-3:00", -3 * ONE_HOUR), // 30
    // NOTE(review): modern Central Africa Time is UTC+2; GMT-1:00 here matches
    // the deprecated JDK table -- confirm before changing.
    new TimeZoneInfo("CAT", "Central African Time", "GMT-1:00", -1 * ONE_HOUR),
  };

}
1,890
310
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for manipulating DICOM JSON."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from typing import Any, Dict, List, Optional, Text

import attr
from hcls_imaging_ml_toolkit import tags

# The key used for values in DICOM JSON.
_VALUE_KEY = 'Value'


def Insert(dicom_json: Dict[Text, Any], tag: tags.DicomTag,
           value: Any) -> None:
  """Stores `value` under `tag` in the given DICOM JSON dict.

  DICOM JSON (https://www.dicomstandard.org/dicomweb/dicom-json-format)
  keeps every attribute as {'vr': ..., 'Value': [...]}; scalars are wrapped
  in a one-element list before being stored.

  Args:
    dicom_json: DICOM JSON dict that receives the attribute.
    tag: A DICOM tag.
    value: The value (or list of values) to store for the tag.
  """
  if isinstance(value, list):
    values = value
  else:
    # DICOM JSON always carries values as a list, even for a single value.
    values = [value]
  dicom_json[tag.number] = {'vr': tag.vr, _VALUE_KEY: values}


def GetList(dicom_json: Dict[Text, Any],
            tag: tags.DicomTag) -> Optional[List[Any]]:
  """Fetches the value list stored under `tag`, if any.

  Args:
    dicom_json: Dictionary containing DICOM JSON.
    tag: The tag whose value list should be returned.

  Returns:
    The list of values for the tag, or None when the tag (or its 'Value'
    entry) is absent from the dictionary.
  """
  try:
    attribute = dicom_json[tag.number]
  except KeyError:
    return None
  return attribute.get(_VALUE_KEY)


def GetValue(dicom_json: Dict[Text, Any], tag: tags.DicomTag) -> Any:
  """Fetches the first value stored under `tag`, if any.

  For single-valued DICOM attributes this is the only value in the list.

  Args:
    dicom_json: Dictionary containing DICOM JSON.
    tag: The tag whose first value should be returned.

  Returns:
    The first element of the tag's value list, or None when the tag is
    missing, has no value list, or the list is empty.
  """
  values = GetList(dicom_json, tag)
  if not values:
    return None
  return values[0]


@attr.s
class DicomBulkData(object):
  # URI for the bulkdata.
  uri = attr.ib(type=Text)
  # The payload.
  data = attr.ib(type=bytes)
  # Content type in the following format:
  # https://www.w3.org/Protocols/rfc1341/4_Content-Type.html.
  content_type = attr.ib(type=Text)


@attr.s
class ObjectWithBulkData(object):
  """DICOM JSON object with the optional bulk data."""
  dicom_dict = attr.ib(type=Dict[Text, Any])
  bulkdata_list = attr.ib(type=List[DicomBulkData], factory=list)

  @property
  def instance_uid(self) -> Text:
    """Returns the Instance UID of the DICOM Object based on the DICOM data."""
    return GetValue(self.dicom_dict, tags.SOP_INSTANCE_UID)

  @property
  def series_uid(self) -> Text:
    """Returns the Series UID of the DICOM Object based on the DICOM data."""
    return GetValue(self.dicom_dict, tags.SERIES_INSTANCE_UID)

  @property
  def study_uid(self) -> Text:
    """Returns the Study UID of the DICOM Object based on the DICOM data."""
    return GetValue(self.dicom_dict, tags.STUDY_INSTANCE_UID)
1,293
1,639
/**
 * Minimal capability interface: a thing that can perform one action.
 */
public interface IThing {
    /**
     * Performs the action this thing represents. No arguments, no result;
     * any effect is defined entirely by the implementation.
     */
    void doAThing();
}
16
726
package org.landy.chain.responsibility.demo2;

import org.landy.chain.responsibility.demo2.domain.Request;
import org.landy.chain.responsibility.demo2.domain.Response;

import java.util.ArrayList;
import java.util.List;

/**
 * Chain-of-responsibility demo: a FilterChain is itself a Filter, so chains
 * can be nested. Filters are executed in the order they were added; each
 * filter decides whether to continue the chain by calling back into it.
 *
 * @author landyl
 * @create 2:16 PM 05/12/2018
 */
public class FilterChain implements Filter {

    private List<Filter> filters = new ArrayList<Filter>();

    int index = 0; // marks which filter in the list runs next

    // Returning the FilterChain itself (rather than void) enables fluent,
    // chained addFilter(...).addFilter(...) calls.
    public FilterChain addFilter(Filter filter) {
        filters.add(filter);
        // Design trick: return the chain after adding a rule, which makes the
        // doFilter call below convenient to set up.
        return this;
    }

    public void doFilter(Request request, Response response, FilterChain fc) {
        // All filters have run: stop the recursion.
        if(index == filters.size()) return ;

        // Advance the cursor before delegating, so the next filter's own
        // doFilter(fc) call continues from the following position.
        Filter f = filters.get(index);
        index++;
        f.doFilter(request, response, fc);
    }

}
444
746
package io.openlineage.client;

import static io.openlineage.client.TypeResolver.titleCase;
import static javax.lang.model.element.Modifier.ABSTRACT;
import static javax.lang.model.element.Modifier.FINAL;
import static javax.lang.model.element.Modifier.PRIVATE;
import static javax.lang.model.element.Modifier.PUBLIC;
import static javax.lang.model.element.Modifier.STATIC;

import java.io.IOException;
import java.io.PrintWriter;
import java.net.URI;
import java.net.URL;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;

import com.squareup.javapoet.AnnotationSpec;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.CodeBlock;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.MethodSpec.Builder;
import com.squareup.javapoet.ParameterSpec;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;

import io.openlineage.client.TypeResolver.ArrayResolvedType;
import io.openlineage.client.TypeResolver.ObjectResolvedType;
import io.openlineage.client.TypeResolver.PrimitiveResolvedType;
import io.openlineage.client.TypeResolver.ResolvedField;
import io.openlineage.client.TypeResolver.ResolvedType;
import io.openlineage.client.TypeResolver.ResolvedTypeVisitor;

/**
 * Generates a JavaClass with all the types as inner classes
 */
public class JavaPoetGenerator {

  private final TypeResolver typeResolver;
  // Maps a schema container (file) name to the URL used to build each type's
  // "_schemaURL" constant.
  private final Map<String, URL> containerToID;
  private final String containerPackage;
  private final String containerClassName;
  // When true, generate the server-side flavor (public constructors, no
  // producer field, no builders/factories).
  private final boolean server;
  // Fully qualified name of the generated container class.
  private final String containerClass;

  public JavaPoetGenerator(TypeResolver typeResolver, String containerPackage, String containerClassName, boolean server, Map<String, URL> containerToID) {
    this.typeResolver = typeResolver;
    this.containerPackage = containerPackage;
    this.containerClassName = containerClassName;
    this.server = server;
    if (containerToID == null) {
      throw new RuntimeException("missing baseURL");
    }
    this.containerToID = containerToID;
    this.containerClass = containerPackage + "." + containerClassName;
  }

  /**
   * Emits the complete container class (and all nested types) to the writer.
   * Client builds additionally get a {@code producer} URI field set through
   * the container's constructor.
   */
  public void generate(PrintWriter printWriter) throws IOException {
    TypeSpec.Builder containerTypeBuilder = TypeSpec.classBuilder(containerClassName)
        .addModifiers(PUBLIC, FINAL);
    if (!server) {
      containerTypeBuilder.addField(FieldSpec.builder(ClassName.get(URI.class), "producer", PRIVATE, FINAL).build());
      containerTypeBuilder.addMethod(MethodSpec.constructorBuilder()
          .addModifiers(PUBLIC)
          .addParameter(
              ParameterSpec.builder(ClassName.get(URI.class), "producer").build()
          )
          .addCode("this.producer = producer;\n")
          .build());
    }
    generateTypes(containerTypeBuilder);
    TypeSpec openLineage = containerTypeBuilder.build();
    JavaFile javaFile = JavaFile.builder(containerPackage, openLineage)
        .build();
    javaFile.writeTo(printWriter);
  }

  /**
   * Walks all resolved types: base types become interfaces, everything else
   * becomes a model class plus (client-only) a factory method and a builder.
   */
  private void generateTypes(TypeSpec.Builder containerTypeBuilder) {
    Collection<ObjectResolvedType> types = typeResolver.getTypes();
    for (ObjectResolvedType type : types) {
      if (type.getName().length() == 0) {
        // we're generating types that have name (through ref)
        continue;
      }
      if (typeResolver.getBaseTypes().contains(type.getName())) { // interfaces
        generateInterface(containerTypeBuilder, type);
      } else { // concrete types
        // We generate:
        // A data class
        // A factory method
        // A builder class
        TypeSpec builderClassSpec = builderClass(type);
        TypeSpec modelClassSpec = modelClass(type);
        containerTypeBuilder.addType(modelClassSpec);
        if (! server) {
          containerTypeBuilder.addMethod(factoryModelMethodUnderContainer(type));
          containerTypeBuilder.addMethod(MethodSpec.methodBuilder("new" + type.getName() + "Builder")
              .addModifiers(PUBLIC)
              .returns(ClassName.get(containerClass, type.getName() + "Builder"))
              .addCode("return new $N();", type.getName() + "Builder")
              .build());
          containerTypeBuilder.addType(builderClassSpec);
        }
      }
    }
  }

  /**
   * Builds the {@code @JsonCreator} constructor for a model class. Schema-URL
   * fields are hard-coded from {@code containerToID}; all other properties
   * become annotated parameters.
   */
  private MethodSpec modelConstructor(ObjectResolvedType type) {
    Builder constructor = MethodSpec.constructorBuilder();
    // CustomFacet stays publicly constructible so users can subclass it;
    // server types are also constructed directly.
    if (type.getName().equals("CustomFacet") || server) {
      constructor.addModifiers(PUBLIC);
    } else {
      constructor.addModifiers(PRIVATE);
    }
    constructor.addAnnotation(JsonCreator.class);
    for (ResolvedField f : type.getProperties()) {
      if (isASchemaUrlField(f)) {
        String schemaURL = containerToID.get(type.getContainer()) + "#/$defs/" + type.getName();
        constructor.addCode("this.$N = URI.create($S);\n", f.getName(), schemaURL);
      } else {
        constructor.addJavadoc("@param $N $N\n", f.getName(), f.getDescription() == null ? "the " + f.getName() : f.getDescription());
        constructor.addParameter(
            ParameterSpec.builder(getTypeName(f.getType()), f.getName())
                .addAnnotation(AnnotationSpec.builder(JsonProperty.class).addMember("value", "$S", f.getName()).build())
                .build());
        constructor.addCode("this.$N = $N;\n", f.getName(), f.getName());
      }
    }
    if (type.hasAdditionalProperties()) {
      constructor.addCode(CodeBlock.builder().addStatement("this.$N = new $T<>()", "additionalProperties", LinkedHashMap.class).build());
    }
    return constructor.build();
  }

  /**
   * Builds the immutable model class for a concrete type: fields, getters,
   * optional additional-properties map, Jackson annotations and constructor.
   */
  private TypeSpec modelClass(ObjectResolvedType type) {
    TypeSpec.Builder modelClassBuilder = TypeSpec.classBuilder(type.getName())
        .addModifiers(STATIC, PUBLIC);
    if (!server) {
      modelClassBuilder.addAnnotation(AnnotationSpec.builder(JsonDeserialize.class)
          .addMember("as", CodeBlock.of(type.getName() + ".class"))
          .build());
    }
    for (ObjectResolvedType parent : type.getParents()) {
      modelClassBuilder.addSuperinterface(ClassName.get(containerPackage, parent.getContainer(), parent.getName()));
    }
    //adds possibility to extend CustomFacet
    if (!type.getName().equals("CustomFacet")) {
      modelClassBuilder.addModifiers(FINAL);
    }
    com.squareup.javapoet.AnnotationSpec.Builder jsonPropertyOrder = AnnotationSpec.builder(JsonPropertyOrder.class);
    for (ResolvedField f : type.getProperties()) {
      modelClassBuilder.addField(getTypeName(f.getType()), f.getName(), PRIVATE, FINAL);
      MethodSpec getter = getter(f)
          .addModifiers(PUBLIC)
          .addCode("return $N;", f.getName())
          .build();
      modelClassBuilder.addMethod(getter);
      jsonPropertyOrder.addMember("value", "$S", f.getName());
    }
    if (type.hasAdditionalProperties()) {
      String fieldName = "additionalProperties";
      TypeName additionalPropertiesValueType = type.getAdditionalPropertiesType() == null ? ClassName.get(Object.class) : getTypeName(type.getAdditionalPropertiesType());
      TypeName additionalPropertiesType = ParameterizedTypeName.get(ClassName.get(Map.class), ClassName.get(String.class), additionalPropertiesValueType);
      modelClassBuilder.addMethod(MethodSpec
          .methodBuilder("get" + titleCase(fieldName))
          .addJavadoc("@return additional properties")
          .returns(additionalPropertiesType)
          .addModifiers(PUBLIC)
          .addCode("return $N;", fieldName)
          .addAnnotation(AnnotationSpec.builder(JsonAnyGetter.class).build())
          .build());
      modelClassBuilder.addField(
          FieldSpec.builder(additionalPropertiesType, fieldName, PRIVATE, FINAL)
              .addAnnotation(JsonAnySetter.class)
              .build());
    }
    modelClassBuilder.addAnnotation(jsonPropertyOrder.build());
    MethodSpec modelConstructor = modelConstructor(type);
    modelClassBuilder.addMethod(modelConstructor);
    return modelClassBuilder.build();
  }

  /**
   * Builds the fluent builder class for a concrete type. Producer and
   * schema-URL fields are filled in automatically, so they get no setter.
   * Builders that need the container's producer are non-static inner classes.
   */
  private TypeSpec builderClass(ObjectResolvedType type) {
    TypeSpec.Builder builderClassBuilder = TypeSpec.classBuilder(type.getName() + "Builder")
        .addModifiers(PUBLIC, FINAL);
    boolean producerFiledExist = type.getProperties().stream()
        .anyMatch(this::isAProducerField);
    if (!producerFiledExist) builderClassBuilder.addModifiers(STATIC);
    type.getProperties().stream().filter(f -> !isASchemaUrlField(f)).forEach(f -> {
      if (!(isAProducerField(f))) {
        builderClassBuilder.addField(getTypeName(f.getType()), f.getName(), PRIVATE);
        builderClassBuilder.addMethod(
            MethodSpec
                .methodBuilder(f.getName())
                .addParameter(getTypeName(f.getType()), f.getName())
                .addJavadoc("@param $N $N\n", f.getName(), f.getDescription() == null ? "the " + f.getName() : f.getDescription())
                .addModifiers(PUBLIC)
                .returns(ClassName.get(containerPackage, containerClassName, type.getName() + "Builder"))
                .addJavadoc("@return this\n")
                .addCode("this.$N = $N;\n", f.getName(), f.getName())
                .addCode("return this;")
                .build());
      }
    });
    if (type.hasAdditionalProperties()) {
      String fieldName = "additionalProperties";
      TypeName additionalPropertiesValueType = type.getAdditionalPropertiesType() == null ? ClassName.get(Object.class) : getTypeName(type.getAdditionalPropertiesType());
      TypeName additionalPropertiesType = ParameterizedTypeName.get(ClassName.get(Map.class), ClassName.get(String.class), additionalPropertiesValueType);
      builderClassBuilder.addField(
          FieldSpec.builder(additionalPropertiesType, fieldName, PRIVATE, FINAL)
              .initializer("new $T<>()", LinkedHashMap.class)
              .build());
      builderClassBuilder.addMethod(MethodSpec
          .methodBuilder("put")
          .addJavadoc("add additional properties\n")
          .addModifiers(PUBLIC)
          .returns(ClassName.get(containerPackage, containerClassName, type.getName() + "Builder"))
          .addParameter(TypeName.get(String.class), "key")
          .addJavadoc("@param key the additional property name\n")
          .addParameter(additionalPropertiesValueType, "value")
          .addJavadoc("@param value the additional property value\n")
          .addCode("this.$N.put(key, value);", fieldName)
          .addCode("return this;", fieldName)
          .addJavadoc("@return this\n")
          .build());
    }
    Builder build = builderBuildMethod(type);
    builderClassBuilder.addMethod(build.build());
    return builderClassBuilder.build();
  }

  /**
   * Builds the builder's {@code build()} method body: constructs the model,
   * copies additional properties, and returns it. Producer values come from
   * the enclosing container instance.
   */
  private Builder builderBuildMethod(ObjectResolvedType type) {
    List<CodeBlock> builderParams = new ArrayList<>();
    type.getProperties().stream().filter(f -> !isASchemaUrlField(f)).forEach(f -> {
      if (isAProducerField(f)) {
        builderParams.add(CodeBlock.of(containerClassName + ".this.producer"));
      } else {
        builderParams.add(CodeBlock.of("$N", f.getName()));
      }
    });
    Builder build = MethodSpec
        .methodBuilder("build")
        .addModifiers(PUBLIC)
        .returns(getTypeName(type))
        .addCode("$N __result = new $N(", type.getName(), type.getName())
        .addCode(CodeBlock.join(builderParams, ", "))
        .addCode(");\n");
    if (type.hasAdditionalProperties()) {
      build.addCode("__result.getAdditionalProperties().putAll(additionalProperties);\n");
    }
    build.addCode("return __result;\n");
    return build;
  }

  /**
   * Builds a {@code newXxx(...)} factory method on the container for a
   * concrete type; the container's producer is passed through implicitly.
   */
  private MethodSpec factoryModelMethodUnderContainer(ObjectResolvedType type) {
    Builder factory = MethodSpec.methodBuilder("new" + type.getName())
        .addModifiers(PUBLIC)
        .returns(getTypeName(type));
    List<CodeBlock> factoryParams = new ArrayList<>();
    type.getProperties().stream().filter(f -> !isASchemaUrlField(f)).forEach(f -> {
      if (isAProducerField(f)) {
        factoryParams.add(CodeBlock.of("this.producer"));
      } else {
        factory.addParameter(ParameterSpec.builder(getTypeName(f.getType()), f.getName()).build());
        factory.addJavadoc("@param $N $N\n", f.getName(), f.getDescription() == null ? "the " + f.getName() : f.getDescription());
        factoryParams.add(CodeBlock.of("$N", f.getName()));
      }
    });
    factory.addJavadoc("@return $N", type.getName());
    factory.addCode("return new $N(", type.getName());
    factory.addCode(CodeBlock.join(factoryParams, ", "));
    factory.addCode(");\n");
    return factory.build();
  }

  /** True for the implicit producer property (filled from the container). */
  private boolean isAProducerField(ResolvedField f) {
    return f.getName().equals("_producer") || f.getName().equals("producer");
  }

  /** True for the schema-URL property; only special-cased on client builds. */
  private boolean isASchemaUrlField(ResolvedField f) {
    return !server && (f.getName().equals("_schemaURL") || f.getName().equals("schemaURL"));
  }

  /**
   * Emits an interface for a base type: abstract getters for each property
   * plus, for facets, a default implementation used for deserialization.
   */
  private void generateInterface(TypeSpec.Builder containerTypeBuilder, ObjectResolvedType type) {
    TypeSpec.Builder interfaceBuilder = TypeSpec.interfaceBuilder(type.getName())
        .addModifiers(STATIC, PUBLIC);
    generateDefaultImplementation(containerTypeBuilder, type, interfaceBuilder);
    for (ResolvedField f : type.getProperties()) {
      MethodSpec getter = getter(f)
          .addModifiers(ABSTRACT, PUBLIC)
          .build();
      interfaceBuilder.addMethod(getter);
    }
    if (type.hasAdditionalProperties()) {
      String fieldName = "additionalProperties";
      TypeName additionalPropertiesValueType = getAdditionalPropertiesValueType(type);
      TypeName additionalPropertiesType = getAdditionalPropertiesType(additionalPropertiesValueType);
      interfaceBuilder.addMethod(MethodSpec
          .methodBuilder("get" + titleCase(fieldName))
          .addJavadoc("@return additional properties")
          .returns(additionalPropertiesType)
          .addModifiers(PUBLIC, ABSTRACT)
          .build());
    }
    TypeSpec intrfc = interfaceBuilder.build();
    containerTypeBuilder.addType(intrfc);
  }

  /**
   * For facet interfaces (except BaseFacet), emits a {@code DefaultXxx}
   * concrete class Jackson can deserialize into, plus a container-level
   * factory method for it.
   */
  private void generateDefaultImplementation(
      TypeSpec.Builder containerTypeBuilder,
      ObjectResolvedType type,
      TypeSpec.Builder interfaceBuilder) {
    ///////////////////////////////////////////
    // Default implementation to deserialize //
    ///////////////////////////////////////////
    if (type.getName().endsWith("Facet") && !type.getName().equals("BaseFacet")) {
      // adding the annotation to the interface to have a default implementation
      interfaceBuilder.addAnnotation(AnnotationSpec.builder(JsonDeserialize.class)
          .addMember("as", CodeBlock.of("Default" + type.getName() + ".class"))
          .build());
      TypeSpec.Builder classBuilder = TypeSpec.classBuilder("Default" + type.getName())
          .addModifiers(STATIC, PUBLIC);
      classBuilder.addSuperinterface(ClassName.get(containerPackage, containerClassName, type.getName()));
      MethodSpec.Builder constructor = MethodSpec.constructorBuilder()
          .addModifiers(PUBLIC);
      constructor.addAnnotation(JsonCreator.class);
      // NOTE(review): fieldNames is populated but never read afterwards —
      // looks like dead code; confirm before removing.
      List<String> fieldNames = new ArrayList<String>();
      for (ResolvedField f : type.getProperties()) {
        classBuilder.addField(getTypeName(f.getType()), f.getName(), PRIVATE, FINAL);
        fieldNames.add(f.getName());
        if (isASchemaUrlField(f)) {
          setSchemaURLField(type, constructor, f);
        } else {
          addConstructorParameter(constructor, f);
        }
        MethodSpec getter = getter(f)
            .addModifiers(PUBLIC)
            .addCode("return $N;", f.getName())
            .build();
        classBuilder.addMethod(getter);
      }
      // additionalFields
      if (type.hasAdditionalProperties()) {
        addAdditionalProperties(type, classBuilder, constructor);
      }
      classBuilder.addMethod(constructor.build());
      containerTypeBuilder.addType(classBuilder.build());
      Builder factory = MethodSpec.methodBuilder("new" + type.getName())
          .addModifiers(PUBLIC)
          .returns(getTypeName(type));
      List<CodeBlock> factoryParams = new ArrayList<>();
      type.getProperties().stream().filter(f -> !isASchemaUrlField(f)).forEach(f -> {
        if (isAProducerField(f)) {
          factoryParams.add(CodeBlock.of("this.producer"));
        } else {
          factory.addParameter(ParameterSpec.builder(getTypeName(f.getType()), f.getName()).build());
          factory.addJavadoc("@param $N $N\n", f.getName(), f.getDescription() == null ? "the " + f.getName() : f.getDescription());
          factoryParams.add(CodeBlock.of("$N", f.getName()));
        }
      });
      factory.addJavadoc("@return $N", type.getName());
      factory.addCode("return new $N(", "Default" + type.getName());
      factory.addCode(CodeBlock.join(factoryParams, ", "));
      factory.addCode(");\n");
      containerTypeBuilder.addMethod(factory.build());
    }
    ///////////////////////////////
  }

  /** Adds one (optionally annotated) parameter plus its Javadoc to a method. */
  private void addParameterFromField(MethodSpec.Builder factory, ResolvedField f, AnnotationSpec annotationSpec) {
    com.squareup.javapoet.ParameterSpec.Builder paramSpecBuilder = ParameterSpec.builder(getTypeName(f.getType()), f.getName());
    if (annotationSpec != null) {
      paramSpecBuilder.addAnnotation(annotationSpec);
    }
    factory.addParameter(paramSpecBuilder.build());
    factory.addJavadoc("@param $N $N\n", f.getName(), f.getDescription() == null ? "the " + f.getName() : f.getDescription());
  }

  /** Map&lt;String, V&gt; type used for additional properties. */
  private ParameterizedTypeName getAdditionalPropertiesType(
      TypeName additionalPropertiesValueType) {
    return ParameterizedTypeName.get(ClassName.get(Map.class), ClassName.get(String.class), additionalPropertiesValueType);
  }

  /** Value type of the additional-properties map; Object when unspecified. */
  private TypeName getAdditionalPropertiesValueType(ObjectResolvedType type) {
    return type.getAdditionalPropertiesType() == null ? ClassName.get(Object.class) : getTypeName(type.getAdditionalPropertiesType());
  }

  /** Wires the Jackson any-getter/any-setter plumbing into a default class. */
  private void addAdditionalProperties(
      ObjectResolvedType type,
      TypeSpec.Builder classBuilder,
      MethodSpec.Builder constructor) {
    String fieldName = "additionalProperties";
    TypeName additionalPropertiesValueType = getAdditionalPropertiesValueType(type);
    TypeName additionalPropertiesType = getAdditionalPropertiesType(additionalPropertiesValueType);
    classBuilder.addMethod(MethodSpec
        .methodBuilder("get" + titleCase(fieldName))
        .addJavadoc("@return additional properties")
        .returns(additionalPropertiesType)
        .addModifiers(PUBLIC)
        .addCode("return $N;", fieldName)
        .addAnnotation(AnnotationSpec.builder(JsonAnyGetter.class).build())
        .build());
    classBuilder.addField(
        FieldSpec.builder(additionalPropertiesType, fieldName, PRIVATE, FINAL)
            .addAnnotation(JsonAnySetter.class)
            .build());
    constructor.addCode(CodeBlock.builder().addStatement("this.$N = new $T<>()", fieldName, LinkedHashMap.class).build());
  }

  /** Adds a @JsonProperty-annotated constructor parameter and assignment. */
  private void addConstructorParameter(MethodSpec.Builder constructor, ResolvedField f) {
    addParameterFromField(constructor, f, AnnotationSpec.builder(JsonProperty.class).addMember("value", "$S", f.getName()).build());
    constructor.addCode("this.$N = $N;\n", f.getName(), f.getName());
  }

  /** Hard-codes the schema URL constant in a constructor body. */
  private void setSchemaURLField(
      ObjectResolvedType type,
      MethodSpec.Builder constructor,
      ResolvedField f) {
    String schemaURL = containerToID.get(type.getContainer()) + "#/$defs/" + type.getName();
    constructor.addCode("this.$N = URI.create($S);\n", f.getName(), schemaURL);
  }

  /** Starts a getter spec for a field (modifiers/body added by callers). */
  private Builder getter(ResolvedField f) {
    Builder builder = MethodSpec
        .methodBuilder("get" + titleCase(f.getName()))
        .returns(getTypeName(f.getType()));
    if (f.getDescription() != null) {
      builder.addJavadoc("@return $N", f.getDescription());
    }
    return builder;
  }

  /**
   * Maps a resolved schema type to a JavaPoet TypeName: JSON primitives to
   * boxed Java types (with string formats mapped to URI/ZonedDateTime/UUID),
   * objects to generated inner classes, arrays to List&lt;T&gt;.
   */
  public TypeName getTypeName(ResolvedType type) {
    return type.accept(new ResolvedTypeVisitor<TypeName>() {
      @Override
      public TypeName visit(PrimitiveResolvedType primitiveType) {
        if (primitiveType.getName().equals("integer")) {
          return ClassName.get(Long.class);
        } else if (primitiveType.getName().equals("number")) {
          return ClassName.get(Double.class);
        } else if (primitiveType.getName().equals("boolean")) {
          return ClassName.get(Boolean.class);
        } else if (primitiveType.getName().equals("string")) {
          if (primitiveType.getFormat() != null) {
            String format = primitiveType.getFormat();
            if (format.equals("uri")) {
              return ClassName.get(URI.class);
            } else if (format.equals("date-time")) {
              return ClassName.get(ZonedDateTime.class);
            } else if (format.equals("uuid")) {
              return ClassName.get(UUID.class);
            } else {
              throw new RuntimeException("Unknown format: " + primitiveType.getFormat());
            }
          }
          return ClassName.get(String.class);
        }
        throw new RuntimeException("Unknown primitive: " + primitiveType.getName());
      }

      @Override
      public TypeName visit(ObjectResolvedType objectType) {
        return ClassName.get(containerClass, objectType.getName());
      }

      @Override
      public TypeName visit(ArrayResolvedType arrayType) {
        return ParameterizedTypeName.get(ClassName.get(List.class), getTypeName(arrayType.getItems()));
      }
    });
  }
}
8,257
2,151
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef SERVICES_DEVICE_GENERIC_SENSOR_PLATFORM_SENSOR_ACCELEROMETER_MAC_H_ #define SERVICES_DEVICE_GENERIC_SENSOR_PLATFORM_SENSOR_ACCELEROMETER_MAC_H_ #include <memory> #include "base/timer/timer.h" #include "services/device/generic_sensor/platform_sensor.h" class SuddenMotionSensor; namespace device { // Implementation of PlatformSensor for macOS to query the accelerometer // sensor. // This is a single instance object per browser process which is created by // The singleton PlatformSensorProviderMac. If there are no clients, this // instance is not created. class PlatformSensorAccelerometerMac : public PlatformSensor { public: // Construct a platform sensor of type ACCELEROMETER, given a buffer |mapping| // where readings will be written. PlatformSensorAccelerometerMac(SensorReadingSharedBuffer* reading_buffer, PlatformSensorProvider* provider); mojom::ReportingMode GetReportingMode() override; // Can only be called once, the first time or after a StopSensor call. bool StartSensor(const PlatformSensorConfiguration& configuration) override; void StopSensor() override; protected: ~PlatformSensorAccelerometerMac() override; bool CheckSensorConfiguration( const PlatformSensorConfiguration& configuration) override; PlatformSensorConfiguration GetDefaultConfiguration() override; private: void PollForData(); std::unique_ptr<SuddenMotionSensor> sudden_motion_sensor_; SensorReading reading_; // Repeating timer for data polling. base::RepeatingTimer timer_; DISALLOW_COPY_AND_ASSIGN(PlatformSensorAccelerometerMac); }; } // namespace device #endif // SERVICES_DEVICE_GENERIC_SENSOR_PLATFORM_SENSOR_ACCELEROMETER_MAC_H_
562
2,816
<gh_stars>1000+ #include "duckdb/function/scalar/nested_functions.hpp" #include "duckdb/function/aggregate/nested_functions.hpp" #include "duckdb/planner/expression/bound_aggregate_expression.hpp" #include "duckdb/common/pair.hpp" #include "duckdb/planner/expression/bound_function_expression.hpp" #include "duckdb/common/map.hpp" #include "duckdb/common/types/vector.hpp" namespace duckdb { template <class T> struct HistogramAggState { map<T, idx_t> *hist; }; struct HistogramFunction { template <class STATE> static void Initialize(STATE *state) { state->hist = nullptr; } template <class STATE> static void Destroy(STATE *state) { if (state->hist) { delete state->hist; } } static bool IgnoreNull() { return true; } }; template <class T> static void HistogramUpdateFunction(Vector inputs[], FunctionData *, idx_t input_count, Vector &state_vector, idx_t count) { D_ASSERT(input_count == 1); auto &input = inputs[0]; VectorData sdata; state_vector.Orrify(count, sdata); VectorData input_data; input.Orrify(count, input_data); auto states = (HistogramAggState<T> **)sdata.data; for (idx_t i = 0; i < count; i++) { if (input_data.validity.RowIsValid(input_data.sel->get_index(i))) { auto state = states[sdata.sel->get_index(i)]; if (!state->hist) { state->hist = new map<T, idx_t>(); } auto value = (T *)input_data.data; (*state->hist)[value[input_data.sel->get_index(i)]]++; } } } static void HistogramUpdateFunctionString(Vector inputs[], FunctionData *, idx_t input_count, Vector &state_vector, idx_t count) { D_ASSERT(input_count == 1); auto &input = inputs[0]; VectorData sdata; state_vector.Orrify(count, sdata); VectorData input_data; input.Orrify(count, input_data); auto states = (HistogramAggState<string> **)sdata.data; for (idx_t i = 0; i < count; i++) { if (input_data.validity.RowIsValid(input_data.sel->get_index(i))) { auto state = states[sdata.sel->get_index(i)]; if (!state->hist) { state->hist = new map<string, idx_t>(); } auto value = (string_t *)input_data.data; 
(*state->hist)[value[input_data.sel->get_index(i)].GetString()]++; } } } template <class T> static void HistogramCombineFunction(Vector &state, Vector &combined, idx_t count) { VectorData sdata; state.Orrify(count, sdata); auto states_ptr = (HistogramAggState<T> **)sdata.data; auto combined_ptr = FlatVector::GetData<HistogramAggState<T> *>(combined); for (idx_t i = 0; i < count; i++) { auto state = states_ptr[sdata.sel->get_index(i)]; if (!state->hist) { continue; } if (!combined_ptr[i]->hist) { combined_ptr[i]->hist = new map<T, idx_t>(); } D_ASSERT(combined_ptr[i]->hist); D_ASSERT(state->hist); for (auto &entry : *state->hist) { (*combined_ptr[i]->hist)[entry.first] += entry.second; } } } template <class T> static void HistogramFinalize(Vector &state_vector, FunctionData *, Vector &result, idx_t count, idx_t offset) { VectorData sdata; state_vector.Orrify(count, sdata); auto states = (HistogramAggState<T> **)sdata.data; auto &mask = FlatVector::Validity(result); auto &child_entries = StructVector::GetEntries(result); auto &bucket_list = child_entries[0]; auto &count_list = child_entries[1]; auto old_len = ListVector::GetListSize(*bucket_list); auto &bucket_validity = FlatVector::Validity(*bucket_list); auto &count_validity = FlatVector::Validity(*count_list); for (idx_t i = 0; i < count; i++) { const auto rid = i + offset; auto state = states[sdata.sel->get_index(i)]; if (!state->hist) { mask.SetInvalid(rid); bucket_validity.SetInvalid(rid); count_validity.SetInvalid(rid); continue; } for (auto &entry : *state->hist) { auto bucket_value = Value::CreateValue(entry.first); ListVector::PushBack(*bucket_list, bucket_value); auto count_value = Value::CreateValue(entry.second); ListVector::PushBack(*count_list, count_value); } auto list_struct_data = FlatVector::GetData<list_entry_t>(*bucket_list); list_struct_data[rid].length = ListVector::GetListSize(*bucket_list) - old_len; list_struct_data[rid].offset = old_len; list_struct_data = 
FlatVector::GetData<list_entry_t>(*count_list); list_struct_data[rid].length = ListVector::GetListSize(*count_list) - old_len; list_struct_data[rid].offset = old_len; old_len = list_struct_data[rid].length; } } unique_ptr<FunctionData> HistogramBindFunction(ClientContext &context, AggregateFunction &function, vector<unique_ptr<Expression>> &arguments) { D_ASSERT(arguments.size() == 1); child_list_t<LogicalType> struct_children; struct_children.push_back({"bucket", LogicalType::LIST(arguments[0]->return_type)}); struct_children.push_back({"count", LogicalType::LIST(LogicalType::UBIGINT)}); auto struct_type = LogicalType::MAP(move(struct_children)); function.return_type = struct_type; return make_unique<VariableReturnBindData>(function.return_type); } template <typename T> AggregateFunction GetHistogramFunction(const LogicalType &type) { using STATE_TYPE = HistogramAggState<T>; return AggregateFunction("histogram", {type}, LogicalTypeId::MAP, AggregateFunction::StateSize<STATE_TYPE>, AggregateFunction::StateInitialize<STATE_TYPE, HistogramFunction>, HistogramUpdateFunction<T>, HistogramCombineFunction<T>, HistogramFinalize<T>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<STATE_TYPE, HistogramFunction>); } AggregateFunction GetHistogramFunction(PhysicalType type) { switch (type) { case PhysicalType::UINT16: return AggregateFunction("histogram", {LogicalType::USMALLINT}, LogicalTypeId::MAP, AggregateFunction::StateSize<HistogramAggState<uint16_t>>, AggregateFunction::StateInitialize<HistogramAggState<uint16_t>, HistogramFunction>, HistogramUpdateFunction<uint16_t>, HistogramCombineFunction<uint16_t>, HistogramFinalize<uint16_t>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<HistogramAggState<uint16_t>, HistogramFunction>); case PhysicalType::UINT32: return AggregateFunction("histogram", {LogicalType::UINTEGER}, LogicalTypeId::MAP, AggregateFunction::StateSize<HistogramAggState<uint32_t>>, 
AggregateFunction::StateInitialize<HistogramAggState<uint32_t>, HistogramFunction>, HistogramUpdateFunction<uint32_t>, HistogramCombineFunction<uint32_t>, HistogramFinalize<uint32_t>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<HistogramAggState<uint32_t>, HistogramFunction>); case PhysicalType::UINT64: return AggregateFunction("histogram", {LogicalType::UBIGINT}, LogicalTypeId::MAP, AggregateFunction::StateSize<HistogramAggState<uint64_t>>, AggregateFunction::StateInitialize<HistogramAggState<uint64_t>, HistogramFunction>, HistogramUpdateFunction<uint64_t>, HistogramCombineFunction<uint64_t>, HistogramFinalize<uint64_t>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<HistogramAggState<uint64_t>, HistogramFunction>); case PhysicalType::INT16: return AggregateFunction("histogram", {LogicalType::SMALLINT}, LogicalTypeId::MAP, AggregateFunction::StateSize<HistogramAggState<int16_t>>, AggregateFunction::StateInitialize<HistogramAggState<int16_t>, HistogramFunction>, HistogramUpdateFunction<int16_t>, HistogramCombineFunction<int16_t>, HistogramFinalize<int16_t>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<HistogramAggState<int16_t>, HistogramFunction>); case PhysicalType::INT32: return AggregateFunction("histogram", {LogicalType::INTEGER}, LogicalTypeId::MAP, AggregateFunction::StateSize<HistogramAggState<int32_t>>, AggregateFunction::StateInitialize<HistogramAggState<int32_t>, HistogramFunction>, HistogramUpdateFunction<int32_t>, HistogramCombineFunction<int32_t>, HistogramFinalize<int32_t>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<HistogramAggState<int32_t>, HistogramFunction>); case PhysicalType::INT64: return AggregateFunction("histogram", {LogicalType::BIGINT}, LogicalTypeId::MAP, AggregateFunction::StateSize<HistogramAggState<int64_t>>, AggregateFunction::StateInitialize<HistogramAggState<int64_t>, HistogramFunction>, HistogramUpdateFunction<int64_t>, HistogramCombineFunction<int64_t>, 
HistogramFinalize<int64_t>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<HistogramAggState<int64_t>, HistogramFunction>); case PhysicalType::FLOAT: return AggregateFunction("histogram", {LogicalType::FLOAT}, LogicalTypeId::MAP, AggregateFunction::StateSize<HistogramAggState<float>>, AggregateFunction::StateInitialize<HistogramAggState<float>, HistogramFunction>, HistogramUpdateFunction<float>, HistogramCombineFunction<float>, HistogramFinalize<float>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<HistogramAggState<float>, HistogramFunction>); case PhysicalType::DOUBLE: return AggregateFunction("histogram", {LogicalType::DOUBLE}, LogicalTypeId::MAP, AggregateFunction::StateSize<HistogramAggState<double>>, AggregateFunction::StateInitialize<HistogramAggState<double>, HistogramFunction>, HistogramUpdateFunction<double>, HistogramCombineFunction<double>, HistogramFinalize<double>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<HistogramAggState<double>, HistogramFunction>); case PhysicalType::VARCHAR: return AggregateFunction("histogram", {LogicalType::VARCHAR}, LogicalTypeId::MAP, AggregateFunction::StateSize<HistogramAggState<string>>, AggregateFunction::StateInitialize<HistogramAggState<string>, HistogramFunction>, HistogramUpdateFunctionString, HistogramCombineFunction<string>, HistogramFinalize<string>, nullptr, HistogramBindFunction, AggregateFunction::StateDestroy<HistogramAggState<string>, HistogramFunction>); default: throw InternalException("Unimplemented histogram aggregate"); } } void HistogramFun::RegisterFunction(BuiltinFunctions &set) { AggregateFunctionSet fun("histogram"); fun.AddFunction(GetHistogramFunction(PhysicalType::UINT16)); fun.AddFunction(GetHistogramFunction(PhysicalType::UINT32)); fun.AddFunction(GetHistogramFunction(PhysicalType::UINT64)); fun.AddFunction(GetHistogramFunction(PhysicalType::INT16)); fun.AddFunction(GetHistogramFunction(PhysicalType::INT32)); 
fun.AddFunction(GetHistogramFunction(PhysicalType::INT64)); fun.AddFunction(GetHistogramFunction(PhysicalType::FLOAT)); fun.AddFunction(GetHistogramFunction(PhysicalType::DOUBLE)); fun.AddFunction(GetHistogramFunction(PhysicalType::VARCHAR)); fun.AddFunction(GetHistogramFunction<int64_t>(LogicalType::TIMESTAMP)); fun.AddFunction(GetHistogramFunction<int64_t>(LogicalType::TIMESTAMP_TZ)); set.AddFunction(fun); } } // namespace duckdb
4,953
365
import math import torch # Through the masking variable we have access to the following variables/statistics. ''' Access to optimizer: masking.optimizer Access to momentum/Adam update: masking.get_momentum_for_weight(weight) Accessable global statistics: Layer statistics: Non-zero count of layer: masking.name2nonzeros[name] Zero count of layer: masking.name2zeros[name] Redistribution proportion: masking.name2variance[name] Number of items removed through pruning: masking.name2removed[name] Network statistics: Total number of nonzero parameter in the network: masking.total_nonzero = 0 Total number of zero-valued parameter in the network: masking.total_zero = 0 Total number of parameters removed in pruning: masking.total_removed = 0 ''' def your_redistribution(masking, name, weight, mask): ''' Returns: Layer importance The unnormalized layer importance statistic for the layer "name". A higher value indicates that more pruned parameters are redistributed to this layer compared to layers with lower value. The values will be automatically sum-normalized after this step. ''' return layer_importance #===========================================================# # EXAMPLE # #===========================================================# def variance_redistribution(masking, name, weight, mask): '''Return the mean variance of existing weights. Higher gradient variance means a layer does not have enough capacity to model the inputs with the current number of weights. Thus we want to add more weights if we have higher variance. If variance of the gradient stabilizes this means that some weights might be useless/not needed. ''' # Adam calculates the running average of the sum of square for us # This is similar to RMSProp. 
if 'exp_avg_sq' not in masking.optimizer.state[weight]: print('Variance redistribution requires the adam optimizer to be run!') raise Exception('Variance redistribution requires the adam optimizer to be run!') iv_adam_sumsq = torch.sqrt(masking.optimizer.state[weight]['exp_avg_sq']) layer_importance = iv_adam_sumsq[mask.bool()].mean().item() return layer_importance def your_pruning(masking, mask, weight, name): """Returns: mask Pruned Binary mask where 1s indicated active weights. Can be modified in-place or newly constructed """ return mask #===========================================================# # EXAMPLE # #===========================================================# def magnitude_variance_pruning(masking, mask, weight, name): ''' Prunes weights which have high gradient variance and low magnitude. Intuition: Weights that are large are important but there is also a dimension of reliability. If a large weight makes a large correct prediction 8/10 times is it better than a medium weight which makes a correct prediction 10/10 times? To test this, we combine magnitude (importance) with reliability (variance of gradient). Good: Weights with large magnitude and low gradient variance are the most important. Weights with medium variance/magnitude are promising for improving network performance. Bad: Weights with large magnitude but high gradient variance hurt performance. Weights with small magnitude and low gradient variance are useless. Weights with small magnitude and high gradient variance cannot learn anything usefull. We here take the geometric mean of those both normalized distribution to find weights to prune. ''' # Adam calculates the running average of the sum of square for us # This is similar to RMSProp. We take the inverse of this to rank # low variance gradients higher. 
if 'exp_avg_sq' not in masking.optimizer.state[weight]: print('Magnitude variance pruning requires the adam optimizer to be run!') raise Exception('Magnitude variance pruning requires the adam optimizer to be run!') iv_adam_sumsq = 1./torch.sqrt(masking.optimizer.state[weight]['exp_avg_sq']) num_remove = math.ceil(masking.name2prune_rate[name]*masking.name2nonzeros[name]) num_zeros = masking.name2zeros[name] k = math.ceil(num_zeros + num_remove) if num_remove == 0.0: return weight.data != 0.0 max_var = iv_adam_sumsq[mask.bool()].max().item() max_magnitude = torch.abs(weight.data[mask.bool()]).max().item() product = ((iv_adam_sumsq/max_var)*torch.abs(weight.data)/max_magnitude)*mask product[mask==0] = 0.0 x, idx = torch.sort(product.view(-1)) mask.data.view(-1)[idx[:k]] = 0.0 return mask def your_growth(masking, name, new_mask, total_regrowth, weight): ''' Returns: mask Binary mask with newly grown weights. 1s indicated active weights in the binary mask. ''' return new_mask
1,909
697
<reponame>clinuxrulz/sodium package chapter4.section11; import pump.*; import chapter4.section4.LifeCycle; import chapter4.section4.LifeCycle.End; import chapter4.section6.AccumulatePulsesPump; import chapter4.section7.Fill; import chapter4.section7.ShowDollarsPump; import chapter4.section8.NotifyPointOfSale; import chapter4.section9.Keypad; import nz.sodium.*; import java.util.Optional; public class PresetAmountPump implements Pump { public Outputs create(Inputs inputs) { StreamLoop<Fuel> sStart = new StreamLoop<>(); Fill fi = new Fill(inputs.sClearSale.map(u -> Unit.UNIT), inputs.sFuelPulses, inputs.calibration, inputs.price1, inputs.price2, inputs.price3, sStart); NotifyPointOfSale np = new NotifyPointOfSale( new LifeCycle(inputs.sNozzle1, inputs.sNozzle2, inputs.sNozzle3), inputs.sClearSale, fi); sStart.loop(np.sStart); CellLoop<Boolean> keypadActive = new CellLoop<>(); Keypad ke = new Keypad(inputs.sKeypad, inputs.sClearSale, keypadActive); Preset pr = new Preset(ke.value, fi, np.fuelFlowing, np.fillActive.map(o -> o.isPresent())); keypadActive.loop(pr.keypadActive); return new Outputs() .setDelivery(pr.delivery) .setSaleCostLCD(fi.dollarsDelivered.map( q -> Formatters.formatSaleCost(q))) .setSaleQuantityLCD(fi.litersDelivered.map( q -> Formatters.formatSaleQuantity(q))) .setPriceLCD1(ShowDollarsPump.priceLCD(np.fillActive, fi.price, Fuel.ONE, inputs)) .setPriceLCD2(ShowDollarsPump.priceLCD(np.fillActive, fi.price, Fuel.TWO, inputs)) .setPriceLCD3(ShowDollarsPump.priceLCD(np.fillActive, fi.price, Fuel.THREE, inputs)) .setSaleComplete(np.sSaleComplete) .setPresetLCD(ke.value.map(v -> Formatters.formatSaleCost((double)v))) .setBeep(np.sBeep.orElse(ke.sBeep)); } }
1,266
4,057
# -*- coding: utf-8 -*- import sys from django.core.management.base import LabelCommand from django.template import TemplateDoesNotExist, loader from django_extensions.management.utils import signalcommand class Command(LabelCommand): help = "Finds the location of the given template by resolving its path" args = "[template_path]" label = 'template path' @signalcommand def handle_label(self, template_path, **options): try: template = loader.get_template(template_path).template except TemplateDoesNotExist: sys.stderr.write("No template found\n") else: sys.stdout.write(self.style.SUCCESS((template.name)))
252
2,496
<gh_stars>1000+ /** * Copyright (c) 2006-2021 LOVE Development Team * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. * * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * 3. This notice may not be removed or altered from any source distribution. **/ #ifndef LOVE_TYPES_H #define LOVE_TYPES_H #include "int.h" // STD #include <bitset> #include <vector> namespace love { class Type { public: static const uint32 MAX_TYPES = 128; Type(const char *name, Type *parent); Type(const Type&) = delete; static Type *byName(const char *name); void init(); uint32 getId(); const char *getName() const; bool isa(const uint32 &other) { if (!inited) init(); return bits[other]; } bool isa(const Type &other) { if (!inited) init(); // Note that if this type implements the other // calling init above will also have inited // the other. return bits[other.id]; } private: const char * const name; Type * const parent; uint32 id; bool inited; std::bitset<MAX_TYPES> bits; }; } // love #endif // LOVE_TYPES_H
543
369
[ { "id": 1, "name": "Driwwwle", "image": "https://imgur.com/kbGs6zH.png", "description": "A platform for web developers to showcase their projects", "tags": ["next", "node", "mongodb", "tailwind", "react-query"], "source_code": "https://github.com/itsnitinr/driwwwle-v2", "demo": "https://driwwwle.com/" }, { "id": 2, "name": "<NAME>", "image": "https://imgur.com/tAZotn3.png", "description": "E-commerce website for a PCB manufacturing company", "tags": ["react", "node", "mongodb", "redux", "razorpay", "material-ui"], "demo": "https://www.pcbcupid.com/" }, { "id": 3, "name": "HealthyWays", "image": "https://imgur.com/8FFqaX8.png", "description": "A food ordering platform for healthy and home-cooked meals", "tags": ["react", "node", "mongodb", "redux", "razorpay", "material-ui"], "source_code": "https://github.com/itsnitinr/healthyways", "demo": "https://healthyways.herokuapp.com/" }, { "id": 4, "name": "VSCode Portfolio", "image": "https://imgur.com/mKkz0iz.png", "description": "A themeable and developer-centric portfolio based on the VSCode", "tags": ["next", "css-modules"], "source_code": "https://github.com/itsnitinr/vscode-portfolio", "demo": "http://vscode-portfolio.vercel.app/" } ]
547
11,545
#define REDISMODULE_EXPERIMENTAL_API #include "redismodule.h" #include <strings.h> #include <assert.h> #include <unistd.h> #include <errno.h> #define UNUSED(V) ((void) V) /* A sample movable keys command that returns a list of all * arguments that follow a KEY argument, i.e. */ int getkeys_command(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) { int i; int count = 0; /* Handle getkeys-api introspection */ if (RedisModule_IsKeysPositionRequest(ctx)) { for (i = 0; i < argc; i++) { size_t len; const char *str = RedisModule_StringPtrLen(argv[i], &len); if (len == 3 && !strncasecmp(str, "key", 3) && i + 1 < argc) RedisModule_KeyAtPos(ctx, i + 1); } return REDISMODULE_OK; } /* Handle real command invocation */ RedisModule_ReplyWithArray(ctx, REDISMODULE_POSTPONED_LEN); for (i = 0; i < argc; i++) { size_t len; const char *str = RedisModule_StringPtrLen(argv[i], &len); if (len == 3 && !strncasecmp(str, "key", 3) && i + 1 < argc) { RedisModule_ReplyWithString(ctx, argv[i+1]); count++; } } RedisModule_ReplySetArrayLength(ctx, count); return REDISMODULE_OK; } int getkeys_fixed(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) { int i; RedisModule_ReplyWithArray(ctx, argc - 1); for (i = 1; i < argc; i++) { RedisModule_ReplyWithString(ctx, argv[i]); } return REDISMODULE_OK; } /* Introspect a command using RM_GetCommandKeys() and returns the list * of keys. Essentially this is COMMAND GETKEYS implemented in a module. 
*/ int getkeys_introspect(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) { UNUSED(argv); UNUSED(argc); if (argc < 3) { RedisModule_WrongArity(ctx); return REDISMODULE_OK; } int num_keys; int *keyidx = RedisModule_GetCommandKeys(ctx, &argv[1], argc - 1, &num_keys); if (!keyidx) { if (!errno) RedisModule_ReplyWithEmptyArray(ctx); else { char err[100]; switch (errno) { case ENOENT: RedisModule_ReplyWithError(ctx, "ERR ENOENT"); break; case EINVAL: RedisModule_ReplyWithError(ctx, "ERR EINVAL"); break; default: snprintf(err, sizeof(err) - 1, "ERR errno=%d", errno); RedisModule_ReplyWithError(ctx, err); break; } } } else { int i; RedisModule_ReplyWithArray(ctx, num_keys); for (i = 0; i < num_keys; i++) RedisModule_ReplyWithString(ctx, argv[1 + keyidx[i]]); RedisModule_Free(keyidx); } return REDISMODULE_OK; } int RedisModule_OnLoad(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) { UNUSED(argv); UNUSED(argc); if (RedisModule_Init(ctx,"getkeys",1,REDISMODULE_APIVER_1)== REDISMODULE_ERR) return REDISMODULE_ERR; if (RedisModule_CreateCommand(ctx,"getkeys.command", getkeys_command,"getkeys-api",0,0,0) == REDISMODULE_ERR) return REDISMODULE_ERR; if (RedisModule_CreateCommand(ctx,"getkeys.fixed", getkeys_fixed,"",2,4,1) == REDISMODULE_ERR) return REDISMODULE_ERR; if (RedisModule_CreateCommand(ctx,"getkeys.introspect", getkeys_introspect,"",0,0,0) == REDISMODULE_ERR) return REDISMODULE_ERR; return REDISMODULE_OK; }
1,706
428
<reponame>cping/LGame<filename>Java/old/OpenGL-1.0(old_ver)/Loon-backend-Android/src/loon/core/geom/Polygon.java<gh_stars>100-1000 /** * * Copyright 2008 - 2011 * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * * @project loon * @author cping * @email:<EMAIL> * @version 0.1 */ package loon.core.geom; import java.util.ArrayList; import loon.physics.PPolygon; import loon.utils.CollectionUtils; public class Polygon extends Shape { /** * */ private static final long serialVersionUID = 1L; public static class Polygon2i { public int npoints; public int[] xpoints; public int[] ypoints; private static final int MIN_LENGTH = 4; public Polygon2i() { xpoints = new int[MIN_LENGTH]; ypoints = new int[MIN_LENGTH]; } public Polygon2i(int xpoints[], int ypoints[], int npoints) { if (npoints > xpoints.length || npoints > ypoints.length) { throw new IndexOutOfBoundsException( "npoints > xpoints.length || " + "npoints > ypoints.length"); } if (npoints < 0) { throw new NegativeArraySizeException("npoints < 0"); } this.npoints = npoints; this.xpoints = CollectionUtils.copyOf(xpoints, npoints); this.ypoints = CollectionUtils.copyOf(ypoints, npoints); } public void addPoint(int x, int y) { if (npoints >= xpoints.length || npoints >= ypoints.length) { int newLength = (npoints * 2); if (newLength < MIN_LENGTH) { newLength = MIN_LENGTH; } else if ((newLength & (newLength - 1)) != 0) { newLength = Integer.highestOneBit(newLength); } xpoints = CollectionUtils.copyOf(xpoints, newLength); ypoints = 
CollectionUtils.copyOf(ypoints, newLength); } xpoints[npoints] = x; ypoints[npoints] = y; npoints++; } public int[] getVertices() { int vertice_size = xpoints.length * 2; int[] verts = new int[vertice_size]; for (int i = 0, j = 0; i < vertice_size; i += 2, j++) { verts[i] = xpoints[j]; verts[i + 1] = ypoints[j]; } return verts; } public void reset() { npoints = 0; xpoints = new int[MIN_LENGTH]; ypoints = new int[MIN_LENGTH]; } } private boolean allowDups = false; private boolean closed = true; public Polygon(float[] points) { int length = points.length; this.points = new float[length]; maxX = -Float.MIN_VALUE; maxY = -Float.MIN_VALUE; minX = Float.MAX_VALUE; minY = Float.MAX_VALUE; x = Float.MAX_VALUE; y = Float.MAX_VALUE; for (int i = 0; i < length; i++) { this.points[i] = points[i]; if (i % 2 == 0) { if (points[i] > maxX) { maxX = points[i]; } if (points[i] < minX) { minX = points[i]; } if (points[i] < x) { x = points[i]; } } else { if (points[i] > maxY) { maxY = points[i]; } if (points[i] < minY) { minY = points[i]; } if (points[i] < y) { y = points[i]; } } } findCenter(); calculateRadius(); pointsDirty = true; } public Polygon() { points = new float[0]; maxX = -Float.MIN_VALUE; maxY = -Float.MIN_VALUE; minX = Float.MAX_VALUE; minY = Float.MAX_VALUE; } public Polygon(float[] xpoints, float[] ypoints, int npoints) { if (npoints > xpoints.length || npoints > ypoints.length) { throw new IndexOutOfBoundsException("npoints > xpoints.length || " + "npoints > ypoints.length"); } if (npoints < 0) { throw new NegativeArraySizeException("npoints < 0"); } points = new float[0]; maxX = -Float.MIN_VALUE; maxY = -Float.MIN_VALUE; minX = Float.MAX_VALUE; minY = Float.MAX_VALUE; for (int i = 0; i < npoints; i++) { addPoint(xpoints[i], ypoints[i]); } } public Polygon(int[] xpoints, int[] ypoints, int npoints) { if (npoints > xpoints.length || npoints > ypoints.length) { throw new IndexOutOfBoundsException("npoints > xpoints.length || " + "npoints > ypoints.length"); } if 
(npoints < 0) { throw new NegativeArraySizeException("npoints < 0"); } points = new float[0]; maxX = -Float.MIN_VALUE; maxY = -Float.MIN_VALUE; minX = Float.MAX_VALUE; minY = Float.MAX_VALUE; for (int i = 0; i < npoints; i++) { addPoint(xpoints[i], ypoints[i]); } } public PPolygon getPPolygon(float scale) { return new PPolygon(points,scale); } public void setAllowDuplicatePoints(boolean allowDups) { this.allowDups = allowDups; } public void addPoint(float x, float y) { if (hasVertex(x, y) && (!allowDups)) { return; } ArrayList<Float> tempPoints = new ArrayList<Float>(); for (int i = 0; i < points.length; i++) { tempPoints.add(points[i]); } tempPoints.add(x); tempPoints.add(y); int length = tempPoints.size(); this.points = new float[length]; for (int i = 0; i < length; i++) { points[i] = tempPoints.get(i); } if (x > maxX) { maxX = x; } if (y > maxY) { maxY = y; } if (x < minX) { minX = x; } if (y < minY) { minY = y; } findCenter(); calculateRadius(); pointsDirty = true; } @Override public Shape transform(Matrix transform) { checkPoints(); Polygon resultPolygon = new Polygon(); float result[] = new float[points.length]; transform.transform(points, 0, result, 0, points.length / 2); resultPolygon.points = result; resultPolygon.findCenter(); resultPolygon.closed = closed; return resultPolygon; } @Override public void setX(float x) { super.setX(x); pointsDirty = false; } @Override public void setY(float y) { super.setY(y); pointsDirty = false; } @Override protected void createPoints() { } @Override public boolean closed() { return closed; } public void setClosed(boolean closed) { this.closed = closed; } public Polygon copy() { float[] copyPoints = new float[points.length]; System.arraycopy(points, 0, copyPoints, 0, copyPoints.length); return new Polygon(copyPoints); } }
2,597
30,785
<reponame>mazhidong/jadx package jadx.tests.integration.types; import org.junit.jupiter.api.Test; import jadx.NotYetImplemented; import jadx.core.dex.nodes.ClassNode; import jadx.tests.api.IntegrationTest; import static jadx.tests.api.utils.JadxMatchers.containsOne; import static org.hamcrest.MatcherAssert.assertThat; public class TestGenerics4 extends IntegrationTest { public static class TestCls { public static class Inner<T> { public void overload(IList<? super T> list) { } public void overload(T t) { } } public interface IList<T> { void list(T t); } public static class ObjIList implements IList<Object> { @Override public void list(Object o) { } } public Inner<Object> test() { Inner<Object> inner = new Inner<>(); inner.overload(new ObjIList()); return inner; } } @Test public void test() { ClassNode cls = getClassNode(TestCls.class); String code = cls.getCode().toString(); assertThat(code, containsOne("public static class ObjIList implements IList<Object> {")); assertThat(code, containsOne("Inner<Object> inner = new Inner<>();")); assertThat(code, containsOne("inner.overload((IList<? super Object>) new ObjIList());")); } @NotYetImplemented @Test public void testOmitCast() { ClassNode cls = getClassNode(TestCls.class); String code = cls.getCode().toString(); assertThat(code, containsOne("inner.overload(new ObjIList());")); } }
534
711
package io.apiman.gateway.engine.policies.util; import io.apiman.gateway.engine.beans.ApiRequest; import io.apiman.gateway.engine.beans.PolicyFailure; import io.apiman.gateway.engine.beans.PolicyFailureType; import io.apiman.gateway.engine.components.ILdapComponent; import io.apiman.gateway.engine.components.IPolicyFailureFactoryComponent; import io.apiman.gateway.engine.policies.AuthorizationPolicy; import io.apiman.gateway.engine.policies.BasicAuthLDAPTest; import io.apiman.gateway.engine.policies.BasicAuthenticationPolicy; import io.apiman.gateway.engine.policies.config.BasicAuthenticationConfig; import io.apiman.gateway.engine.policy.IPolicyChain; import io.apiman.gateway.engine.policy.IPolicyContext; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.util.Set; import javax.naming.NamingException; import net.sf.ehcache.Cache; import net.sf.ehcache.CacheManager; import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.directory.api.ldap.model.entry.DefaultEntry; import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.exception.LdapException; import org.apache.directory.api.ldap.model.ldif.LdifEntry; import org.apache.directory.api.ldap.model.ldif.LdifReader; import org.apache.directory.api.ldap.model.name.Dn; import org.apache.directory.api.ldap.model.schema.SchemaManager; import org.apache.directory.api.ldap.schema.manager.impl.DefaultSchemaManager; import org.apache.directory.server.core.api.DirectoryService; import org.apache.directory.server.core.api.DnFactory; import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition; import org.apache.directory.server.core.shared.DefaultDnFactory; import org.apache.directory.server.i18n.I18n; import org.junit.Assert; import org.mockito.Mockito; /** * Some common 
initialisation code for LDAP testing that can be mixed in without requiring inheritance * * @author <NAME> @{literal<<EMAIL>>} */ public interface LdapTestMixin { /** * Initialise the LDAP server with basic test setup. */ default JdbmPartition initLdapTestSetup( String partitionName, File targetDir, CacheManager ehCacheManager, DirectoryService service ) throws IOException, LdapException { JdbmPartition partition; if (!targetDir.isDirectory()) { throw new UncheckedIOException( new IOException("Couldn't find maven target directory: " + targetDir)); } File partitionDir = new File(targetDir, partitionName); if (partitionDir.exists()) { FileUtils.deleteDirectory(partitionDir); } partitionDir.mkdirs(); // Requires EHCache! String ehCacheName = "apiman-" + partitionName + "-ehcache-testing"; ehCacheManager.addCache(ehCacheName); Cache cache = ehCacheManager.getCache(ehCacheName); final SchemaManager schemaManager = new DefaultSchemaManager(); final DnFactory defaultDnFactory = new DefaultDnFactory(schemaManager, cache); partition = new JdbmPartition(schemaManager, defaultDnFactory); partition.setId("apiman"); partition.setPartitionPath(partitionDir.toURI()); partition.setSchemaManager(service.getSchemaManager()); partition.setSuffixDn(new Dn("o=apiman")); service.addPartition(partition); // Inject the foo root entry if it does not already exist try { service.getAdminSession().lookup(partition.getSuffixDn()); } catch (Exception lnnfe) { Dn dn = new Dn("o=apiman"); Entry entry = service.newEntry(dn); entry.add("objectClass", "top", "domain", "extensibleObject"); entry.add("dc", "apiman"); entry.add("cn", "o=apiman"); service.getAdminSession().add(entry); } return partition; } default void injectLdifFiles(DirectoryService service, String... 
ldifFiles) throws Exception { if (ldifFiles != null && ldifFiles.length > 0) { for (String ldifFile : ldifFiles) { InputStream is = null; try { is = BasicAuthLDAPTest.class.getClassLoader().getResourceAsStream(ldifFile); if (is == null) { throw new FileNotFoundException("LDIF file '" + ldifFile + "' not found."); } else { try { LdifReader ldifReader = new LdifReader(is); for (LdifEntry entry : ldifReader) { injectEntry(entry, service); } ldifReader.close(); } catch (Exception e) { throw new RuntimeException(e); } } } finally { IOUtils.closeQuietly(is); } } } } default void injectEntry(LdifEntry entry, DirectoryService service) throws Exception { if (entry.isChangeAdd()) { service.getAdminSession().add( new DefaultEntry(service.getSchemaManager(), entry.getEntry())); } else if (entry.isChangeModify()) { service.getAdminSession().modify(entry.getDn(), entry.getModifications()); } else { String message = I18n.err(I18n.ERR_117, entry.getChangeType()); throw new NamingException(message); } } /** * Creates the http Authorization string for the given credentials. 
* * @param username * @param password */ default String createBasicAuthorization(String username, String password) { String creds = username + ":" + password; StringBuilder builder = new StringBuilder(); builder.append("Basic "); builder.append(Base64.encodeBase64String(creds.getBytes())); return builder.toString(); } default void doTest(String json, String username, String password, Integer expectedFailureCode, ILdapComponent ldapComponentUnderTest) throws Exception { doTest(json, username, password, expectedFailureCode, null, ldapComponentUnderTest); } // pass null if you expect success default void doTest(String json, String username, String password, Integer expectedFailureCode, Set<String> expectedRoles, ILdapComponent ldapComponentUnderTest ) { BasicAuthenticationPolicy policy = new BasicAuthenticationPolicy(); BasicAuthenticationConfig config = policy.parseConfiguration(json); ApiRequest request = new ApiRequest(); request.setType("GET"); request.setApiKey("12345"); request.setRemoteAddr("1.2.3.4"); request.setDestination("/"); IPolicyContext context = Mockito.mock(IPolicyContext.class); final PolicyFailure failure = new PolicyFailure(); Mockito.when(context.getComponent(IPolicyFailureFactoryComponent.class)) .thenReturn((PolicyFailureType type, int failureCode, String message) -> { failure.setType(type); failure.setFailureCode(failureCode); failure.setMessage(message); return failure; }); // The LDAP stuff we're testing! 
Mockito.when(context.getComponent(ILdapComponent.class)).thenReturn(ldapComponentUnderTest); IPolicyChain<ApiRequest> chain = Mockito.mock(IPolicyChain.class); if (username != null) { request.getHeaders().put("Authorization", createBasicAuthorization(username, password)); } if (expectedFailureCode == null) { policy.apply(request, context, config, chain); Mockito.verify(chain).doApply(request); } else { policy.apply(request, context, config, chain); Mockito.verify(chain).doFailure(failure); Assert.assertEquals(expectedFailureCode.intValue(), failure.getFailureCode()); } if (expectedRoles != null && expectedFailureCode == null) { Mockito.verify(context).setAttribute(AuthorizationPolicy.AUTHENTICATED_USER_ROLES, expectedRoles); } } }
3,520
1,968
////////////////////////////////////////////////////////////////////////////// // // This file is part of the Corona game engine. // For overview and more information on licensing please refer to README.md // Home page: https://github.com/coronalabs/corona // Contact: <EMAIL> // ////////////////////////////////////////////////////////////////////////////// #ifndef _Rtt_GLGeometry_H__ #define _Rtt_GLGeometry_H__ #include "Renderer/Rtt_GL.h" #include "Renderer/Rtt_GPUResource.h" // ---------------------------------------------------------------------------- namespace Rtt { // ---------------------------------------------------------------------------- class GLGeometry : public GPUResource { public: typedef GPUResource Super; typedef GLGeometry Self; public: GLGeometry(); virtual void Create( CPUResource* resource ); virtual void Update( CPUResource* resource ); virtual void Destroy(); virtual void Bind(); private: GLvoid* fPositionStart; GLvoid* fTexCoordStart; GLvoid* fColorScaleStart; GLvoid* fUserDataStart; GLuint fVAO; GLuint fVBO; GLuint fIBO; U32 fVertexCount; U32 fIndexCount; }; // ---------------------------------------------------------------------------- } // namespace Rtt // ---------------------------------------------------------------------------- #endif // _Rtt_GLGeometry_H__
373
348
<reponame>chamberone/Leaflet.PixiOverlay {"nom":"Paris","circ":"2ème circonscription","dpt":"Paris","inscrits":71740,"abs":27195,"votants":44545,"blancs":275,"nuls":111,"exp":44159,"res":[{"nuance":"REM","nom":"<NAME>","voix":18463},{"nuance":"LR","nom":"Mme <NAME>","voix":8007},{"nuance":"DVD","nom":"<NAME>","voix":4050},{"nuance":"SOC","nom":"Mme <NAME>","voix":2700},{"nuance":"FI","nom":"Mme <NAME>","voix":2632},{"nuance":"ECO","nom":"<NAME>","voix":2086},{"nuance":"DVD","nom":"<NAME>","voix":1991},{"nuance":"FN","nom":"Mme <NAME>","voix":1021},{"nuance":"COM","nom":"Mme <NAME>","voix":594},{"nuance":"DVD","nom":"Mme <NAME>","voix":557},{"nuance":"DIV","nom":"<NAME>","voix":493},{"nuance":"DIV","nom":"M. <NAME>","voix":336},{"nuance":"DLF","nom":"<NAME>","voix":211},{"nuance":"DIV","nom":"Mme <NAME>","voix":190},{"nuance":"DVG","nom":"<NAME>","voix":190},{"nuance":"DVD","nom":"Mme <NAME>","voix":179},{"nuance":"EXG","nom":"Mme <NAME>","voix":120},{"nuance":"EXD","nom":"M. <NAME>","voix":97},{"nuance":"DIV","nom":"<NAME>","voix":92},{"nuance":"DIV","nom":"<NAME>","voix":71},{"nuance":"DIV","nom":"M. <NAME>","voix":45},{"nuance":"DVD","nom":"M. <NAME>","voix":34},{"nuance":"DVD","nom":"Mme <NAME>","voix":0},{"nuance":"DIV","nom":"<NAME>","voix":0}]}
502
5,169
{ "name": "TGLStackedViewController", "version": "2.1.2", "license": "MIT", "summary": "A stacked view layout with gesture-based reordering using a UICollectionView -- inspired by Passbook and Reminders apps.", "homepage": "https://github.com/gleue/TGLStackedViewController", "authors": { "<NAME>": "<EMAIL>" }, "source": { "git": "https://github.com/gleue/TGLStackedViewController.git", "tag": "2.1.2" }, "source_files": "TGLStackedViewController", "requires_arc": true, "platforms": { "ios": "9.0" } }
211
575
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/renderer/core/html/media/html_media_element.h" #include <algorithm> #include <memory> #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" #include "third_party/blink/public/platform/web_fullscreen_video_status.h" #include "third_party/blink/renderer/core/dom/events/native_event_listener.h" #include "third_party/blink/renderer/core/event_type_names.h" #include "third_party/blink/renderer/core/frame/local_frame.h" #include "third_party/blink/renderer/core/fullscreen/fullscreen.h" #include "third_party/blink/renderer/core/html/media/html_video_element.h" #include "third_party/blink/renderer/core/html/media/media_controls.h" #include "third_party/blink/renderer/core/html/media/media_custom_controls_fullscreen_detector.h" #include "third_party/blink/renderer/core/html/track/text_track.h" #include "third_party/blink/renderer/core/html/track/text_track_cue_list.h" #include "third_party/blink/renderer/core/html/track/vtt/vtt_cue.h" #include "third_party/blink/renderer/core/loader/empty_clients.h" #include "third_party/blink/renderer/core/testing/page_test_base.h" #include "third_party/blink/renderer/platform/bindings/microtask.h" #include "third_party/blink/renderer/platform/testing/empty_web_media_player.h" #include "third_party/blink/renderer/platform/testing/runtime_enabled_features_test_helpers.h" #include "third_party/blink/renderer/platform/testing/testing_platform_support.h" #include "third_party/blink/renderer/platform/testing/unit_test_helpers.h" namespace blink { namespace { // Most methods are faked rather than mocked. Faking avoids naggy warnings // about unexpected calls. HTMLMediaElement <-> WebMediaplayer interface is // highly complex and not generally the focus these tests (with the // exception of the mocked methods). 
// Fake WebMediaPlayer whose clock can be driven by the test scheduler.
// Most methods are faked rather than mocked; only
// SetIsEffectivelyFullscreen is a gmock method.
class FakeWebMediaPlayer final : public EmptyWebMediaPlayer {
 public:
  FakeWebMediaPlayer(WebMediaPlayerClient* client,
                     ExecutionContext* context,
                     double duration)
      : client_(client), context_(context), duration_(duration) {}

  MOCK_METHOD1(SetIsEffectivelyFullscreen,
               void(blink::WebFullscreenVideoStatus));

  double CurrentTime() const override { return current_time_; }

  // Establish a large duration so tests can attempt seeking.
  double Duration() const override { return duration_; }

  // The whole timeline [0, Duration()] is reported as seekable.
  WebTimeRanges Seekable() const override {
    WebTimeRange single_range[] = {WebTimeRange(0, Duration())};
    return WebTimeRanges(single_range, 1);
  }

  // Seeks are two-phase: Seek() records the target, FinishSeek() (called by
  // the test) completes it and notifies the client.
  void Seek(double seconds) override { last_seek_time_ = seconds; }

  void Play() override {
    playing_ = true;
    ScheduleTimeIncrement();
  }

  void Pause() override { playing_ = false; }

  bool Paused() const override { return !playing_; }

  bool IsEnded() const override { return current_time_ == duration_; }

  // Completes a pending Seek(): jumps the clock to the recorded target,
  // fires TimeChanged(), and restarts auto time increments if playing.
  void FinishSeek() {
    ASSERT_GE(last_seek_time_, 0);
    current_time_ = last_seek_time_;
    last_seek_time_ = -1;
    client_->TimeChanged();
    if (playing_)
      ScheduleTimeIncrement();
  }

  // Enables (non-empty delta) or disables (empty optional) automatic
  // advancement of CurrentTime() via delayed tasks.
  void SetAutoIncrementTimeDelta(base::Optional<base::TimeDelta> delta) {
    auto_time_increment_delta_ = delta;
    ScheduleTimeIncrement();
  }

 private:
  // Posts at most one delayed task that will advance the playback clock.
  void ScheduleTimeIncrement() {
    if (scheduled_time_increment_) {
      return;
    }
    if (!auto_time_increment_delta_.has_value()) {
      return;
    }
    context_->GetTaskRunner(TaskType::kInternalMediaRealTime)
        ->PostDelayedTask(FROM_HERE,
                          base::BindOnce(&FakeWebMediaPlayer::AutoTimeIncrement,
                                         base::Unretained(this),
                                         auto_time_increment_delta_.value()),
                          auto_time_increment_delta_.value());
    scheduled_time_increment_ = true;
  }

  // Delayed-task callback: advances the clock by |time_delta| and reschedules
  // itself until the duration is reached.
  void AutoTimeIncrement(base::TimeDelta time_delta) {
    // If time increments have been disabled since posting the task, bail out
    if (!auto_time_increment_delta_.has_value() || !playing_) {
      return;
    }
    scheduled_time_increment_ = false;
    current_time_ += time_delta.InSecondsF();
    // Notify the client if we've reached the end of the set duration
    if (current_time_ >=
duration_) { current_time_ = duration_; client_->TimeChanged(); } else { ScheduleTimeIncrement(); } // Run V8 Microtasks (update OfficialPlaybackPosition) Microtask::PerformCheckpoint(context_->GetIsolate()); } WebMediaPlayerClient* client_; WeakPersistent<ExecutionContext> context_; mutable double current_time_ = 0; bool playing_ = false; base::Optional<base::TimeDelta> auto_time_increment_delta_ = base::TimeDelta::FromMilliseconds(33); bool scheduled_time_increment_ = false; double last_seek_time_ = -1; const double duration_; }; class MediaStubLocalFrameClient : public EmptyLocalFrameClient { public: std::unique_ptr<WebMediaPlayer> CreateWebMediaPlayer( HTMLMediaElement& element, const WebMediaPlayerSource&, WebMediaPlayerClient* client) override { return std::make_unique<FakeWebMediaPlayer>( client, element.GetExecutionContext(), media_duration_); } void SetMediaDuration(double media_duration) { media_duration_ = media_duration; } private: double media_duration_ = 1000000; }; using testing::_; using testing::AtLeast; using testing::Return; } // anonymous namespace class HTMLMediaElementEventListenersTest : public PageTestBase { protected: void SetUp() override { SetupPageWithClients(nullptr, MakeGarbageCollected<MediaStubLocalFrameClient>()); } void DestroyDocument() { PageTestBase::TearDown(); } HTMLVideoElement* Video() { return To<HTMLVideoElement>(GetDocument().QuerySelector("video")); } FakeWebMediaPlayer* WebMediaPlayer() { return static_cast<FakeWebMediaPlayer*>(Video()->GetWebMediaPlayer()); } MediaStubLocalFrameClient* LocalFrameClient() { return static_cast<MediaStubLocalFrameClient*>(GetFrame().Client()); } void SetMediaDuration(double duration) { LocalFrameClient()->SetMediaDuration(duration); } MediaControls* Controls() { return Video()->GetMediaControls(); } void SimulateReadyState(HTMLMediaElement::ReadyState state) { Video()->SetReadyState(state); } void SimulateNetworkState(HTMLMediaElement::NetworkState state) { 
Video()->SetNetworkState(state); } MediaCustomControlsFullscreenDetector* FullscreenDetector() { return Video()->custom_controls_fullscreen_detector_; } }; TEST_F(HTMLMediaElementEventListenersTest, RemovingFromDocumentCollectsAll) { EXPECT_EQ(Video(), nullptr); GetDocument().body()->setInnerHTML("<video controls></video>"); EXPECT_NE(Video(), nullptr); EXPECT_TRUE(Video()->HasEventListeners()); EXPECT_NE(Controls(), nullptr); EXPECT_TRUE(GetDocument().HasEventListeners()); WeakPersistent<HTMLVideoElement> weak_persistent_video = Video(); WeakPersistent<MediaControls> weak_persistent_controls = Controls(); { Persistent<HTMLVideoElement> persistent_video = Video(); GetDocument().body()->setInnerHTML(""); // When removed from the document, the event listeners should have been // dropped. EXPECT_FALSE(GetDocument().HasEventListeners()); // The video element should still have some event listeners. EXPECT_TRUE(persistent_video->HasEventListeners()); } test::RunPendingTasks(); ThreadState::Current()->CollectAllGarbageForTesting(); // They have been GC'd. EXPECT_EQ(weak_persistent_video, nullptr); EXPECT_EQ(weak_persistent_controls, nullptr); } TEST_F(HTMLMediaElementEventListenersTest, ReInsertingInDocumentCollectsControls) { EXPECT_EQ(Video(), nullptr); GetDocument().body()->setInnerHTML("<video controls></video>"); EXPECT_NE(Video(), nullptr); EXPECT_TRUE(Video()->HasEventListeners()); EXPECT_NE(Controls(), nullptr); EXPECT_TRUE(GetDocument().HasEventListeners()); // This should be a no-op. We keep a reference on the VideoElement to avoid an // unexpected GC. 
{ Persistent<HTMLVideoElement> video_holder = Video(); GetDocument().body()->RemoveChild(Video()); GetDocument().body()->AppendChild(video_holder.Get()); } EXPECT_TRUE(GetDocument().HasEventListeners()); EXPECT_TRUE(Video()->HasEventListeners()); test::RunPendingTasks(); ThreadState::Current()->CollectAllGarbageForTesting(); EXPECT_NE(Video(), nullptr); EXPECT_NE(Controls(), nullptr); EXPECT_EQ(Controls(), Video()->GetMediaControls()); } TEST_F(HTMLMediaElementEventListenersTest, FullscreenDetectorTimerCancelledOnContextDestroy) { EXPECT_EQ(Video(), nullptr); GetDocument().body()->setInnerHTML("<video></video>"); Video()->SetSrc("http://example.com"); test::RunPendingTasks(); EXPECT_NE(WebMediaPlayer(), nullptr); // Set ReadyState as HaveMetadata and go fullscreen, so the timer is fired. EXPECT_NE(Video(), nullptr); SimulateReadyState(HTMLMediaElement::kHaveMetadata); LocalFrame::NotifyUserActivation( GetDocument().GetFrame(), mojom::UserActivationNotificationType::kTest); Fullscreen::RequestFullscreen(*Video()); Fullscreen::DidResolveEnterFullscreenRequest(GetDocument(), true /* granted */); test::RunPendingTasks(); Persistent<Document> persistent_document = &GetDocument(); Persistent<MediaCustomControlsFullscreenDetector> detector = FullscreenDetector(); Vector<blink::WebFullscreenVideoStatus> observed_results; ON_CALL(*WebMediaPlayer(), SetIsEffectivelyFullscreen(_)) .WillByDefault(testing::Invoke( [&](blink::WebFullscreenVideoStatus fullscreen_video_status) { observed_results.push_back(fullscreen_video_status); })); DestroyDocument(); test::RunPendingTasks(); // Document should not have listeners as the ExecutionContext is destroyed. EXPECT_FALSE(persistent_document->HasEventListeners()); // Should only notify the kNotEffectivelyFullscreen value when // ExecutionContext is destroyed. 
EXPECT_EQ(1u, observed_results.size()); EXPECT_EQ(blink::WebFullscreenVideoStatus::kNotEffectivelyFullscreen, observed_results[0]); } class MockEventListener final : public NativeEventListener { public: MOCK_METHOD2(Invoke, void(ExecutionContext* executionContext, Event*)); }; class HTMLMediaElementWithMockSchedulerTest : public HTMLMediaElementEventListenersTest { protected: void SetUp() override { EnablePlatform(); // We want total control over when to advance the clock. This also allows // us to call platform()->RunUntilIdle() to run all pending tasks without // fear of looping forever. platform()->SetAutoAdvanceNowToPendingTasks(false); // DocumentParserTiming has DCHECKS to make sure time > 0.0. platform()->AdvanceClockSeconds(1); HTMLMediaElementEventListenersTest::SetUp(); } }; TEST_F(HTMLMediaElementWithMockSchedulerTest, OneTimeupdatePerSeek) { testing::InSequence dummy; GetDocument().body()->setInnerHTML("<video></video>"); // Set a src to trigger WebMediaPlayer creation. Video()->SetSrc("http://example.com"); platform()->RunUntilIdle(); ASSERT_NE(WebMediaPlayer(), nullptr); auto* timeupdate_handler = MakeGarbageCollected<MockEventListener>(); Video()->addEventListener(event_type_names::kTimeupdate, timeupdate_handler); // Simulate conditions where playback is possible. SimulateNetworkState(HTMLMediaElement::kNetworkIdle); SimulateReadyState(HTMLMediaElement::kHaveFutureData); // Simulate advancing playback time. WebMediaPlayer()->SetAutoIncrementTimeDelta( base::TimeDelta::FromMilliseconds(33)); Video()->Play(); // While playing, timeupdate should fire every 250 ms -> 4x per second as long // as media player's CurrentTime continues to advance. EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(4); platform()->RunForPeriodSeconds(1); // If media playback time is fixed, periodic timeupdate's should not continue // to fire. 
WebMediaPlayer()->SetAutoIncrementTimeDelta(base::nullopt); EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(0); platform()->RunForPeriodSeconds(1); // Per spec, pausing should fire `timeupdate` EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(1); Video()->pause(); platform()->RunUntilIdle(); // Seek to some time in the past. A completed seek while paused should trigger // a *single* timeupdate. EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(1); // The WebMediaPlayer current time should have progressed to almost 1 second // (Actually 0.99 due to |kFakeMediaPlayerAutoIncrementTimeDelta|). ASSERT_GE(WebMediaPlayer()->CurrentTime(), 0.95); Video()->setCurrentTime(0.5); // Fake the callback from WebMediaPlayer to complete the seek. WebMediaPlayer()->FinishSeek(); // Give the scheduled timeupdate a chance to fire. platform()->RunUntilIdle(); } TEST_F(HTMLMediaElementWithMockSchedulerTest, PeriodicTimeupdateAfterSeek) { testing::InSequence dummy; GetDocument().body()->setInnerHTML("<video></video>"); // Set a src to trigger WebMediaPlayer creation. Video()->SetSrc("http://example.com"); platform()->RunUntilIdle(); EXPECT_NE(WebMediaPlayer(), nullptr); auto* timeupdate_handler = MakeGarbageCollected<MockEventListener>(); Video()->addEventListener(event_type_names::kTimeupdate, timeupdate_handler); // Simulate conditions where playback is possible. SimulateNetworkState(HTMLMediaElement::kNetworkIdle); SimulateReadyState(HTMLMediaElement::kHaveFutureData); // Simulate advancing playback time to enable periodic timeupdates. WebMediaPlayer()->SetAutoIncrementTimeDelta( base::TimeDelta::FromMilliseconds(8)); Video()->Play(); // Advance a full periodic timeupdate interval (250 ms) and expect a single // timeupdate. EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(1); platform()->RunForPeriodSeconds(.250); // The event is scheduled, but needs one more scheduler cycle to fire. 
platform()->RunUntilIdle(); // Now advance 125 ms to reach the middle of the periodic timeupdate interval. // no additional timeupdate should trigger. EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(0); platform()->RunForPeriodSeconds(.125); platform()->RunUntilIdle(); // While still in the middle of the periodic timeupdate interval, start and // complete a seek and verify that a *non-periodic* timeupdate is fired. EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(1); ASSERT_GE(WebMediaPlayer()->CurrentTime(), 0.3); Video()->setCurrentTime(0.2); WebMediaPlayer()->FinishSeek(); // Expect another timeupdate after FinishSeek due to // seeking -> begin scrubbing -> pause -> timeupdate. EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(1); platform()->RunUntilIdle(); // Advancing the remainder of the last periodic timeupdate interval should be // insufficient to trigger a new timeupdate event because the seek's // timeupdate occurred only 125ms ago. We desire to fire periodic timeupdates // exactly every 250ms from the last timeupdate, and the seek's timeupdate // should reset that 250ms ms countdown. EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(0); platform()->RunForPeriodSeconds(.125); platform()->RunUntilIdle(); // Advancing another 125ms, we should expect a new timeupdate because we are // now 250ms from the seek's timeupdate. EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(1); platform()->RunForPeriodSeconds(.125); platform()->RunUntilIdle(); // Advancing 250ms further, we should expect yet another timeupdate because // this represents a full periodic timeupdate interval with no interruptions // (e.g. no-seeks). 
EXPECT_CALL(*timeupdate_handler, Invoke(_, _)).Times(1); platform()->RunForPeriodSeconds(.250); platform()->RunUntilIdle(); } TEST_F(HTMLMediaElementWithMockSchedulerTest, ShowPosterFlag_FalseAfterLoop) { testing::InSequence dummy; // Adjust the duration of the media to something we can reasonably loop SetMediaDuration(10.0); // Create a looping video with a source GetDocument().body()->setInnerHTML( "<video loop src=\"http://example.com\"></video>"); platform()->RunUntilIdle(); EXPECT_NE(WebMediaPlayer(), nullptr); EXPECT_EQ(WebMediaPlayer()->Duration(), 10.0); EXPECT_TRUE(Video()->Loop()); SimulateNetworkState(HTMLMediaElement::kNetworkIdle); SimulateReadyState(HTMLMediaElement::kHaveEnoughData); // Simulate advancing playback time to enable periodic timeupdates. WebMediaPlayer()->SetAutoIncrementTimeDelta( base::TimeDelta::FromMilliseconds(8)); Video()->Play(); // Ensure the 'seeking' and 'seeked' events are fired, so we know a loop // occurred auto* seeking_handler = MakeGarbageCollected<MockEventListener>(); EXPECT_CALL(*seeking_handler, Invoke(_, _)).Times(1); Video()->addEventListener(event_type_names::kSeeking, seeking_handler); platform()->RunForPeriodSeconds(15); testing::Mock::VerifyAndClearExpectations(seeking_handler); auto* seeked_handler = MakeGarbageCollected<MockEventListener>(); EXPECT_CALL(*seeked_handler, Invoke(_, _)).Times(1); Video()->addEventListener(event_type_names::kSeeked, seeked_handler); WebMediaPlayer()->FinishSeek(); platform()->RunUntilIdle(); testing::Mock::VerifyAndClearExpectations(seeked_handler); // ShowPosterFlag should be false after looping EXPECT_FALSE(Video()->IsShowPosterFlagSet()); } TEST_F(HTMLMediaElementWithMockSchedulerTest, ShowPosterFlag_FalseAfterEnded) { testing::InSequence dummy; // Adjust the duration of the media to something we can reach the end of SetMediaDuration(10.0); // Create a video with a source GetDocument().body()->setInnerHTML( "<video src=\"http://example.com\"></video>"); 
platform()->RunUntilIdle(); EXPECT_NE(WebMediaPlayer(), nullptr); EXPECT_EQ(WebMediaPlayer()->Duration(), 10.0); SimulateNetworkState(HTMLMediaElement::kNetworkIdle); SimulateReadyState(HTMLMediaElement::kHaveEnoughData); // Simulate advancing playback time to enable periodic timeupdates. WebMediaPlayer()->SetAutoIncrementTimeDelta( base::TimeDelta::FromMilliseconds(8)); Video()->Play(); // Ensure the 'ended' event is fired auto* ended_handler = MakeGarbageCollected<MockEventListener>(); Video()->addEventListener(event_type_names::kEnded, ended_handler); EXPECT_CALL(*ended_handler, Invoke(_, _)).Times(1); platform()->RunForPeriodSeconds(15); testing::Mock::VerifyAndClearExpectations(ended_handler); // ShowPosterFlag should be false even after ending EXPECT_FALSE(Video()->IsShowPosterFlagSet()); } struct TestCue { double start_time; double end_time; char const* text; }; constexpr TestCue kTestCueData[] = { {15.000, 17.950, "At the left we can see..."}, {18.160, 20.080, "At the right we can see the..."}, {20.110, 21.960, "...the head-snarlers"}, {21.990, 24.360, "Everything is safe.\nPerfectly safe."}, {24.580, 27.030, "Emo?"}, {28.200, 29.990, "Watch out!"}, {47.030, 48.490, "Are you hurt?"}, {51.990, 53.940, "I don't think so.\nYou?"}, {55.160, 56.980, "I'm Ok."}, {57.110, 61.110, "Get up.\nEmo, it's not safe here."}, {62.030, 63.570, "Let's go."}, }; constexpr base::TimeDelta kTestCueDataLength = base::TimeDelta::FromSecondsD(65); class CueEventListener final : public NativeEventListener { public: void Invoke(ExecutionContext* ctx, Event* event) override { if (event->type() == event_type_names::kEnter) { EXPECT_TRUE(event->target()->GetWrapperTypeInfo()->Equals( VTTCue::GetStaticWrapperTypeInfo())); auto* const cue = static_cast<VTTCue*>(event->target()); auto* const media_element = cue->track()->MediaElement(); OnCueEnter(media_element, cue); return; } else if (event->type() == event_type_names::kExit) { EXPECT_TRUE(event->target()->GetWrapperTypeInfo()->Equals( 
VTTCue::GetStaticWrapperTypeInfo())); auto* const cue = static_cast<VTTCue*>(event->target()); auto* const media_element = cue->track()->MediaElement(); OnCueExit(media_element, cue); return; } // The above checks should be exhaustive FAIL(); } void ExpectAllEventsFiredWithinMargin(base::TimeDelta margin) const { for (auto const& delta : cue_event_deltas_) { EXPECT_TRUE(delta.enter_time_delta.has_value()); EXPECT_LE(delta.enter_time_delta.value(), margin); EXPECT_GE(delta.enter_time_delta.value(), base::TimeDelta()); EXPECT_TRUE(delta.exit_time_delta.has_value()); EXPECT_GE(delta.exit_time_delta.value(), base::TimeDelta()); EXPECT_LE(delta.exit_time_delta.value(), margin); } } private: struct CueChangeEventTimeDelta { // The difference between when the cue was scheduled to begin and when the // |kEnter| event was fired. The optional will be empty if the |kEnter| // event was never fired. base::Optional<base::TimeDelta> enter_time_delta; // The difference between when the cue was scheduled to end and when the // |kExit| event fired. The optional will be empty if the |kExit| event // was never fired. 
base::Optional<base::TimeDelta> exit_time_delta; }; void OnCueEnter(HTMLMediaElement* media_element, VTTCue* cue) { auto const cue_index = cue->CueIndex(); EXPECT_LE(cue_index, cue_event_deltas_.size()); EXPECT_FALSE(cue_event_deltas_[cue_index].enter_time_delta.has_value()); // Get the start time delta double const diff_seconds = media_element->currentTime() - cue->startTime(); cue_event_deltas_[cue_index].enter_time_delta = base::TimeDelta::FromSecondsD(diff_seconds); } void OnCueExit(HTMLMediaElement* media_element, VTTCue* cue) { auto const cue_index = cue->CueIndex(); EXPECT_LE(cue_index, cue_event_deltas_.size()); EXPECT_FALSE(cue_event_deltas_[cue_index].exit_time_delta.has_value()); // Get the end time delta double const diff_seconds = std::fabs(media_element->currentTime() - cue->endTime()); cue_event_deltas_[cue_index].exit_time_delta = base::TimeDelta::FromSecondsD(diff_seconds); } std::array<CueChangeEventTimeDelta, base::size(kTestCueData)> cue_event_deltas_; }; TEST_F(HTMLMediaElementWithMockSchedulerTest, CueEnterExitEventLatency) { testing::InSequence dummy; GetDocument().body()->setInnerHTML("<video></video>"); // Set a src to trigger WebMediaPlayer creation. Video()->SetSrc("http://example.com"); platform()->RunUntilIdle(); ASSERT_NE(WebMediaPlayer(), nullptr); // Create a text track, and fill it with cue data auto* text_track = Video()->addTextTrack("subtitles", "", "", ASSERT_NO_EXCEPTION); auto* listener = MakeGarbageCollected<CueEventListener>(); for (auto cue_data : kTestCueData) { VTTCue* cue = MakeGarbageCollected<VTTCue>( GetDocument(), cue_data.start_time, cue_data.end_time, cue_data.text); text_track->addCue(cue); cue->setOnenter(listener); cue->setOnexit(listener); } // Simulate conditions where playback is possible. SimulateNetworkState(HTMLMediaElement::kNetworkIdle); SimulateReadyState(HTMLMediaElement::kHaveFutureData); // Simulate advancing playback time to enable periodic timeupdates. 
WebMediaPlayer()->SetAutoIncrementTimeDelta( base::TimeDelta::FromMilliseconds(8)); Video()->Play(); platform()->RunForPeriod(kTestCueDataLength); platform()->RunUntilIdle(); // Ensure all cue events fired when expected with a 20ms tolerance // As suggested by the spec: // https://html.spec.whatwg.org/multipage/media.html#playing-the-media-resource:current-playback-position-13 listener->ExpectAllEventsFiredWithinMargin( base::TimeDelta::FromMilliseconds(20)); } } // namespace blink
8,163
5,169
<reponame>Ray0218/Specs<filename>Specs/CircleProgressView/1.0.8/CircleProgressView.podspec.json { "name": "CircleProgressView", "version": "1.0.8", "license": "MIT", "summary": "CircleProgressView", "homepage": "https://github.com/CardinalNow/iOS-CircleProgressView", "authors": { "<NAME>": "<EMAIL>" }, "source": { "git": "https://github.com/CardinalNow/iOS-CircleProgressView.git", "tag": "1.0.8" }, "platforms": { "ios": "8.0" }, "requires_arc": true, "source_files": "ProgressView/*.swift" }
225
344
<reponame>microsoft/archai # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. import argparse from typing import Dict, List, Type, Iterator, Tuple import glob import os import pathlib from collections import OrderedDict import yaml from inspect import getsourcefile import re from runstats import Statistics import matplotlib matplotlib.use('Agg') import seaborn as sns import numpy as np import matplotlib.pyplot as plt import pandas as pd from archai.common import utils from archai.common.ordereddict_logger import OrderedDictLogger import re def main(): parser = argparse.ArgumentParser(description='Report creator') parser.add_argument('--results-dir', '-d', type=str, default=r'~/logdir/proxynas_test_0001', # r'~/logdir/report_test' help='folder with experiment results from pt') parser.add_argument('--out-dir', '-o', type=str, default=r'~/logdir/reports', help='folder to output reports') args, extra_args = parser.parse_known_args() # root dir where all results are stored results_dir = pathlib.Path(utils.full_path(args.results_dir)) print(f'results_dir: {results_dir}') # extract experiment name which is top level directory exp_name = results_dir.parts[-1] # create results dir for experiment out_dir = utils.full_path(os.path.join(args.out_dir, exp_name)) print(f'out_dir: {out_dir}') os.makedirs(out_dir, exist_ok=True) # get list of all structured logs for each job logs = {} job_count = 0 for job_dir in results_dir.iterdir(): job_count += 1 for subdir in job_dir.iterdir(): if not subdir.is_dir(): continue # currently we expect that each job was ExperimentRunner job which should have # _search or _eval folders if subdir.stem.endswith('_search'): sub_job = 'search' elif subdir.stem.endswith('_eval'): sub_job = 'eval' else: raise RuntimeError(f'Sub directory "{subdir}" in job "{job_dir}" must ' 'end with either _search or _eval which ' 'should be the case if ExperimentRunner was used.') logs_filepath = os.path.join(str(subdir), 'log.yaml') if 
os.path.isfile(logs_filepath): fix_yaml(logs_filepath) with open(logs_filepath, 'r') as f: key = job_dir.name + ':' + sub_job logs[key] = yaml.load(f, Loader=yaml.Loader) # create list of epoch nodes having same path in the logs grouped_logs = group_multi_runs(logs) collated_grouped_logs = collect_epoch_nodes(grouped_logs) summary_text, details_text = '', '' for log_key, grouped_logs in collated_grouped_logs.items(): # for each path for epochs nodes, compute stats for node_path, logs_epochs_nodes in grouped_logs.items(): collated_epoch_stats = get_epoch_stats(node_path, logs_epochs_nodes) summary_text += get_summary_text(log_key, out_dir, node_path, collated_epoch_stats, len(logs_epochs_nodes)) details_text += get_details_text(log_key, out_dir, node_path, collated_epoch_stats, len(logs_epochs_nodes)) write_report('summary.md', **vars()) write_report('details.md', **vars()) def epoch_nodes(node:OrderedDict, path=[])->Iterator[Tuple[List[str], OrderedDict]]: """Search nodes recursively for nodes named 'epochs' and return them along with their paths""" for k, v in node.items(): if k == 'epochs' and isinstance(v, OrderedDict) and len(v) and '0' in v: yield path, v elif isinstance(v, OrderedDict): # make recursive call for p, en in epoch_nodes(v, path=path+[k]): yield p, en def fix_yaml(filepath:str): # fix yaml construction recursion error because of bad lines yaml = pathlib.Path(filepath).read_text() bad_lines = [ r'get: !!python/object/apply:builtins.getattr', r'- *id001', r' - get' ] # form pattern by joining str literals after escape by whitespace /s # Note: don't use re.escape as it cannot be used in re.sub pattern = r'\s+'.join([re.escape(l) for l in bad_lines]) fixed_yaml = re.sub(pattern, '', yaml) if yaml != fixed_yaml: backup = pathlib.Path(filepath+'.original.yaml') assert not backup.exists(), f'Backup file {backup} should not exist' backup.write_text(yaml) pathlib.Path(filepath).write_text(fixed_yaml) print(f'Yaml at {filepath} was fixed') def 
remove_seed_part(log_key:str)->str: # regex identifies seed123, seed123.4, seed_123, seed_123.4 # pattern is 'seed' followed by optional '_' followed by int or float number pat = r'seed\_?([0-9]*[.])?[0-9]+' return re.sub(pat, '', log_key) def group_multi_runs(logs:Dict[str, OrderedDict])->Dict[str, List[OrderedDict]]: result:Dict[str, List[OrderedDict]] = {} for log_key, log in logs.items(): seed_less_key = remove_seed_part(log_key) if seed_less_key in result: result[seed_less_key].append(log) else: result[seed_less_key] = [log] return result def collect_epoch_nodes(grouped_logs:Dict[str, List[OrderedDict]])->Dict[str, Dict[str, List[OrderedDict]]]: """Make list of epoch nodes in same path in each of the logs if collate=True else its just list of epoch nodes with jobdir and path as the key.""" collated:Dict[str, Dict[str, List[OrderedDict]]] = {} for log_key, logs in grouped_logs.items(): collated_logs:Dict[str, List[OrderedDict]] = {} for log in logs: for path, epoch_node in epoch_nodes(log): # for each path get the list where we can put epoch node path_key = '/'.join(path) if not path_key in collated_logs: collated_logs[path_key] = [] v = collated_logs[path_key] v.append(epoch_node) collated[log_key] = collated_logs return collated class EpochStats: def __init__(self) -> None: self.start_lr = Statistics() self.end_lr = Statistics() self.train_fold = FoldStats() self.val_fold = FoldStats() def update(self, epoch_node:OrderedDict)->None: self.start_lr.push(epoch_node['start_lr']) if 'train' in epoch_node: self.end_lr.push(epoch_node['train']['end_lr']) self.train_fold.update(epoch_node['train']) if 'val' in epoch_node: self.val_fold.update(epoch_node['val']) class FoldStats: def __init__(self) -> None: self.top1 = Statistics() self.top5 = Statistics() self.duration = Statistics() self.step_time = Statistics() def update(self, fold_node:OrderedDict)->None: self.top1.push(fold_node['top1']) self.top5.push(fold_node['top5']) if 'duration' in fold_node: 
self.duration.push(fold_node['duration']) if 'step_time' in fold_node: self.step_time.push(fold_node['step_time']) def stat2str(stat:Statistics)->str: if len(stat) == 0: return '-' s = f'{stat.mean():.4f}' if len(stat)>1: s += f'<sup> &pm; {stat.stddev():.4f}</sup>' return s def get_epoch_stats(node_path:str, logs_epochs_nodes:List[OrderedDict])->List[EpochStats]: epoch_stats = [] for epochs_node in logs_epochs_nodes: for epoch_num, epoch_node in epochs_node.items(): if not str.isnumeric(epoch_num): # each epoch key must be numeric continue epoch_num = int(epoch_num) if epoch_num >= len(epoch_stats): epoch_stats.append(EpochStats()) epoch_stat = epoch_stats[epoch_num] epoch_stat.update(epoch_node) return epoch_stats def get_valid_filename(s): s = str(s).strip().replace(' ', '-') return re.sub(r'(?u)[^-\w.]', '-', s) def get_summary_text(log_key:str, out_dir:str, node_path:str, epoch_stats:List[EpochStats], seed_runs:int)->str: lines = ['',''] lines.append(f'## Run: {log_key}\n') lines.append(f'### Metric Type: {node_path}\n') lines.append(f'Number of epochs: {len(epoch_stats)}\n') lines.append(f'Number of seeds: {seed_runs}\n') lines.append('\n') plot_filename = get_valid_filename(log_key + ':' + node_path)+'.png' plot_filepath = os.path.join(out_dir, plot_filename) plot_epochs(epoch_stats, plot_filepath) lines.append('') train_duration = Statistics() for epoch_stat in epoch_stats: train_duration += epoch_stat.train_fold.duration lines.append(f'![]({plot_filename})') lines.append(f'Train epoch time: {stat2str(train_duration)}') lines.append('') milestones = [0, 5, 30, 100, 200, 600, 1500] for milestone in milestones: if len(epoch_stats) >= milestone and len(epoch_stats[milestone-1].val_fold.top1)>0: lines.append(f'{stat2str(epoch_stats[milestone-1].val_fold.top1)} val top1 @ {milestone} epochs\n') # last epoch if not len(epoch_stats) in milestones: # find last epoch with valid stats last_epoch = len(epoch_stats)-1 while last_epoch>=0 and 
len(epoch_stats[last_epoch].val_fold.top1)==0: last_epoch -= 1 if last_epoch >=0: lines.append(f'{stat2str(epoch_stats[last_epoch].val_fold.top1)} val top1 @ {len(epoch_stats)} epochs [Last]\n') else: lines.append(f'[Last] No epoch with valid val stats found!') return '\n'.join(lines) def get_details_text(log_key:str, out_dir:str, node_path:str, epoch_stats:List[EpochStats], seed_runs:int)->str: lines = ['',''] lines.append(f'## Run: {log_key}\n') lines.append(f'### Metric Type: {node_path}\n') lines.append(f'Number of seeds: {seed_runs}\n') lines.append('|Epoch |Val Top1 |Val Top5 |Train Top1 |Train Top5 |Train Duration |Val Duration |Train Step Time |Val Step Time |StartLR |EndLR |') lines.append('|---|---|---|---|---|---|---|---|---|---|---|') for i, epoch_stat in enumerate(epoch_stats): line = '|' line += str(i) + '|' line += stat2str(epoch_stat.val_fold.top1) + '|' line += stat2str(epoch_stat.val_fold.top5) + '|' line += stat2str(epoch_stat.train_fold.top1) + '|' line += stat2str(epoch_stat.train_fold.top5) + '|' line += stat2str(epoch_stat.train_fold.duration) + '|' line += stat2str(epoch_stat.val_fold.duration) + '|' line += stat2str(epoch_stat.train_fold.step_time) + '|' line += stat2str(epoch_stat.val_fold.step_time) + '|' line += stat2str(epoch_stat.start_lr) + '|' line += stat2str(epoch_stat.end_lr) + '|' lines.append(line) return '\n'.join(lines) def plot_epochs(epoch_stats:List[EpochStats], filepath:str): plt.ioff() plt.clf() fig, ax = plt.subplots() clrs = sns.color_palette("husl", 5) with sns.axes_style("darkgrid"): metrics = [] val_top1_means = [es.val_fold.top1.mean() if len(es.val_fold.top1)>0 else np.nan for es in epoch_stats] val_top1_std = [es.val_fold.top1.stddev() if len(es.val_fold.top1)>1 else np.nan for es in epoch_stats] val_top1_min = [es.val_fold.top1.minimum() if len(es.val_fold.top1)>0 else np.nan for es in epoch_stats] val_top1_max = [es.val_fold.top1.maximum() if len(es.val_fold.top1)>0 else np.nan for es in epoch_stats] 
metrics.append((val_top1_means, val_top1_std, 'val_top1', val_top1_min, val_top1_max)) val_top5_means = [es.val_fold.top5.mean() if len(es.val_fold.top5)>0 else np.nan for es in epoch_stats] val_top5_std = [es.val_fold.top5.stddev() if len(es.val_fold.top5)>1 else np.nan for es in epoch_stats] val_top5_min = [es.val_fold.top5.minimum() if len(es.val_fold.top5)>0 else np.nan for es in epoch_stats] val_top5_max = [es.val_fold.top5.maximum() if len(es.val_fold.top5)>0 else np.nan for es in epoch_stats] metrics.append((val_top5_means, val_top5_std, 'val_top5', val_top5_min, val_top5_max)) train_top1_means = [es.train_fold.top1.mean() if len(es.train_fold.top1)>0 else np.nan for es in epoch_stats] train_top1_std = [es.train_fold.top1.stddev() if len(es.train_fold.top1)>1 else np.nan for es in epoch_stats] train_top1_min = [es.train_fold.top1.minimum() if len(es.train_fold.top1)>0 else np.nan for es in epoch_stats] train_top1_max = [es.train_fold.top1.maximum() if len(es.train_fold.top1)>0 else np.nan for es in epoch_stats] metrics.append((train_top1_means, train_top1_std, 'train_top1', train_top1_min, train_top1_max)) train_top5_means = [es.train_fold.top5.mean() if len(es.train_fold.top5)>0 else np.nan for es in epoch_stats] train_top5_std = [es.train_fold.top5.stddev() if len(es.train_fold.top5)>1 else np.nan for es in epoch_stats] train_top5_min = [es.train_fold.top1.minimum() if len(es.train_fold.top5)>0 else np.nan for es in epoch_stats] train_top5_max = [es.train_fold.top1.maximum() if len(es.train_fold.top5)>0 else np.nan for es in epoch_stats] metrics.append((train_top5_means, train_top5_std, 'train_top5', train_top5_min, train_top5_max)) for i, metric in enumerate(metrics): ax.plot(range(len(metric[0])), metric[0], label=metric[2], c=clrs[i]) ax.fill_between(range(len(metric[0])), np.subtract(metric[0], metric[1]), np.add(metric[0], metric[1]), alpha=0.5, facecolor=clrs[i]) ax.fill_between(range(len(metric[0])), metric[3], metric[4], alpha=0.1, 
facecolor=clrs[i]) ax.set_xlabel('Epoch') ax.set_ylabel('Accuracy') ax.set_title('Accuracy Metrics') ax.legend() ax.grid('on') # add more ticks #ax.set_xticks(np.arange(max([len(m) for m in metrics]))) # remove tick marks # ax.xaxis.set_tick_params(size=0) # ax.yaxis.set_tick_params(size=0) # change the color of the top and right spines to opaque gray # ax.spines['right'].set_color((.8,.8,.8)) # ax.spines['top'].set_color((.8,.8,.8)) # tweak the axis labels xlab = ax.xaxis.get_label() ylab = ax.yaxis.get_label() xlab.set_style('italic') xlab.set_size(10) ylab.set_style('italic') ylab.set_size(10) # tweak the title ttl = ax.title ttl.set_weight('bold') plt.savefig(filepath) plt.close() def write_report(template_filename:str, **kwargs)->None: source_file = getsourcefile(lambda:0) script_dir = os.path.dirname(os.path.abspath(source_file)) template = pathlib.Path(os.path.join(script_dir, template_filename)).read_text() report = template.format(**kwargs) outfilepath = os.path.join(kwargs['out_dir'], template_filename) with open(outfilepath, 'w', encoding='utf-8') as f: f.write(report) print(f'report written to: {outfilepath}') if __name__ == '__main__': main()
7,436
861
package cn.springcloud.gray.server.module.audit;

import cn.springcloud.gray.server.module.audit.domain.OperateQuery;
import cn.springcloud.gray.server.module.audit.domain.OperateRecord;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;

/**
 * Audit module contract for recording operator actions and querying the
 * recorded history with Spring Data pagination.
 */
public interface OperateAuditModule {

    /**
     * Records a single operation for auditing.
     *
     * @param record the operation record to store
     */
    void recordOperate(OperateRecord record);

    /**
     * Queries previously recorded operations.
     *
     * @param query    filter criteria for the records to return
     * @param pageable page request (page number, size, sort)
     * @return one page of matching operation records
     */
    Page<OperateRecord> queryRecords(OperateQuery query, Pageable pageable);
}
180
1,014
package com.github.neuralnetworks.training.backpropagation; import com.github.neuralnetworks.calculation.ConnectionCalculator; import com.github.neuralnetworks.tensor.ValuesProvider; public interface BackPropagationConnectionCalculator extends ConnectionCalculator { public ValuesProvider getActivations(); public void setActivations(ValuesProvider activations); }
98
358
#pragma once #include <QCheckBox> #include <agz/editor/texture2d/texture2d.h> #include <agz/tracer/core/material.h> AGZ_EDITOR_BEGIN class NormalMapWidget : public QWidget { Q_OBJECT public: struct InitData { bool apply_normal_map = false; Texture2DSlot *normal_map = nullptr; }; NormalMapWidget(const InitData &init_data, ObjectContext &obj_ctx); NormalMapWidget *clone() const; void save_asset(AssetSaver &saver); void load_asset(AssetLoader &loader); Box<tracer::NormalMapper> get_tracer_object() const; bool is_enabled() const noexcept; RC<tracer::ConfigNode> to_config(JSONExportContext &ctx) const; signals: void change_params(); private: ObjectContext &obj_ctx_; QCheckBox *apply_normal_map_ = nullptr; Texture2DSlot *normal_map_ = nullptr; }; AGZ_EDITOR_END
341
348
<gh_stars>100-1000 /* * Copyright (c) 2015-2020, www.dibo.ltd (<EMAIL>). * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * <p> * https://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.diboot.core.binding.cache; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.extension.service.IService; import com.diboot.core.binding.parser.EntityInfoCache; import com.diboot.core.binding.parser.PropInfo; import com.diboot.core.cache.StaticMemoryCacheManager; import com.diboot.core.config.Cons; import com.diboot.core.util.BeanUtils; import com.diboot.core.util.ContextHelper; import com.diboot.core.util.S; import com.diboot.core.util.V; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.ibatis.session.SqlSessionFactory; import org.springframework.context.annotation.Primary; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.*; /** * CacheManager * @author <EMAIL> * @version v2.2.1 * @date 2021/04/17 */ @SuppressWarnings({"JavaDoc","rawtypes", "unchecked"}) @Slf4j public class BindingCacheManager { /** * 类-EntityInfo缓存key */ private static final String CACHE_NAME_CLASS_ENTITY = "CLASS_ENTITY"; /** * 表-EntityInfo缓存key */ private static final String CACHE_NAME_TABLE_ENTITY = "TABLE_ENTITY"; /** * 类-PropInfo缓存key */ private static final String CACHE_NAME_CLASS_PROP = "CLASS_PROP"; /** * Entity类的SimpleName-Entity Class的缓存key */ private static final String 
CACHE_NAME_ENTITYNAME_CLASS = "NAME_CLASS"; /** * 类-fields缓存 */ private static final String CACHE_NAME_CLASS_FIELDS = "CLASS_FIELDS"; /** * 类- name-field Map缓存 */ private static final String CACHE_NAME_CLASS_NAME2FLDMAP = "CLASS_NAME2FLDMAP"; /** * 保证单例,以及彻底加载完缓存数据后再返回 */ private static class Singleton { /** * 实体相关定义缓存管理器 */ static StaticMemoryCacheManager cacheManager = new StaticMemoryCacheManager( CACHE_NAME_CLASS_ENTITY, CACHE_NAME_TABLE_ENTITY, CACHE_NAME_CLASS_PROP, CACHE_NAME_ENTITYNAME_CLASS, CACHE_NAME_CLASS_FIELDS, CACHE_NAME_CLASS_NAME2FLDMAP ); static { // 初始化有service的entity缓存 Map<String, IService> serviceMap = ContextHelper.getApplicationContext().getBeansOfType(IService.class); Set<String> uniqueEntitySet = new HashSet<>(); if (V.notEmpty(serviceMap)) { for (Map.Entry<String, IService> entry : serviceMap.entrySet()) { Class<?> entityClass = BeanUtils.getGenericityClass(entry.getValue(), 1); if (entityClass != null) { IService<?> entityService = entry.getValue(); if (uniqueEntitySet.contains(entityClass.getName())) { if (entityService.getClass().getAnnotation(Primary.class) != null) { EntityInfoCache entityInfoCache = cacheManager.getCacheObj(CACHE_NAME_CLASS_ENTITY, entityClass.getName(), EntityInfoCache.class); if (entityInfoCache != null) { entityInfoCache.setService(entry.getKey()); } } else { log.warn("Entity: {} 存在多个service实现类,可能导致调用实例与预期不一致!", entityClass.getName()); } } else { EntityInfoCache entityInfoCache = new EntityInfoCache(entityClass, entry.getKey()); cacheManager.putCacheObj(CACHE_NAME_CLASS_ENTITY, entityClass.getName(), entityInfoCache); cacheManager.putCacheObj(CACHE_NAME_TABLE_ENTITY, entityInfoCache.getTableName(), entityInfoCache); cacheManager.putCacheObj(CACHE_NAME_ENTITYNAME_CLASS, entityClass.getSimpleName(), entityClass); uniqueEntitySet.add(entityClass.getName()); } } } } else { log.debug("未获取到任何有效@Service."); } // 初始化没有service的table-mapper缓存 SqlSessionFactory sqlSessionFactory = 
ContextHelper.getBean(SqlSessionFactory.class); if(sqlSessionFactory != null){ Collection<Class<?>> mappers = sqlSessionFactory.getConfiguration().getMapperRegistry().getMappers(); if (V.notEmpty(mappers)) { for (Class<?> mapperClass : mappers) { Type[] types = mapperClass.getGenericInterfaces(); try { if (types.length > 0 && types[0] != null) { ParameterizedType genericType = (ParameterizedType) types[0]; Type[] superTypes = genericType.getActualTypeArguments(); if (superTypes != null && superTypes.length > 0 && superTypes[0] != null) { String entityClassName = superTypes[0].getTypeName(); if (!uniqueEntitySet.contains(entityClassName) && entityClassName.length() > 1) { Class<?> entityClass = Class.forName(entityClassName); EntityInfoCache entityInfoCache = new EntityInfoCache(entityClass, null); entityInfoCache.setBaseMapper((Class<? extends BaseMapper>) mapperClass); cacheManager.putCacheObj(CACHE_NAME_CLASS_ENTITY, entityClass.getName(), entityInfoCache); cacheManager.putCacheObj(CACHE_NAME_TABLE_ENTITY, entityInfoCache.getTableName(), entityInfoCache); cacheManager.putCacheObj(CACHE_NAME_ENTITYNAME_CLASS, entityClass.getSimpleName(), entityClass); uniqueEntitySet.add(entityClass.getName()); } } } } catch (Exception e) { log.warn("解析mapper异常", e); } } } } } } @SneakyThrows private static StaticMemoryCacheManager getCacheManager() { return Singleton.cacheManager; } /** * 根据tableName获取cache * @param tableName * @return */ public static EntityInfoCache getEntityInfoByTable(String tableName){ return getCacheManager().getCacheObj(CACHE_NAME_TABLE_ENTITY, tableName, EntityInfoCache.class); } /** * 根据entity类获取cache * @param entityClazz * @return */ public static EntityInfoCache getEntityInfoByClass(Class<?> entityClazz){ return getCacheManager().getCacheObj(CACHE_NAME_CLASS_ENTITY, entityClazz.getName(), EntityInfoCache.class); } /** * 根据bean类获取bean信息cache * @param beanClazz * @return */ public static PropInfo getPropInfoByClass(Class<?> beanClazz){ PropInfo propInfo 
= getCacheManager().getCacheObj(CACHE_NAME_CLASS_PROP, beanClazz.getName(), PropInfo.class); if(propInfo == null){ propInfo = initPropInfoCache(beanClazz); } return propInfo; } /** * 根据tableName获取bean信息cache * @param tableName * @return */ public static PropInfo getPropInfoByTable(String tableName){ Class<?> entityClass = getEntityClassByTable(tableName); if(entityClass != null){ return getPropInfoByClass(entityClass); } return null; } /** * 根据table名称获取entity类 * @param tableName * @return */ public static Class<?> getEntityClassByTable(String tableName){ EntityInfoCache entityInfoCache = getEntityInfoByTable(tableName); return entityInfoCache != null? entityInfoCache.getEntityClass() : null; } /** * 根据class simple名称获取entity类 * @param classSimpleName * @return */ public static Class<?> getEntityClassBySimpleName(String classSimpleName){ return getCacheManager().getCacheObj(CACHE_NAME_ENTITYNAME_CLASS, classSimpleName, Class.class); } /** * 通过table获取mapper * @param table * @return */ public static BaseMapper getMapperByTable(String table){ EntityInfoCache entityInfoCache = getEntityInfoByTable(table); if(entityInfoCache != null){ return entityInfoCache.getBaseMapper(); } return null; } /** * 通过entity获取mapper * @param entityClazz * @return */ public static BaseMapper getMapperByClass(Class<?> entityClazz){ EntityInfoCache entityInfoCache = getEntityInfoByClass(entityClazz); if(entityInfoCache != null){ return entityInfoCache.getBaseMapper(); } return null; } /** * 获取class的fields * @param beanClazz * @return */ public static List<Field> getFields(Class<?> beanClazz){ List<Field> fields = getCacheManager().getCacheObj(CACHE_NAME_CLASS_FIELDS, beanClazz.getName(), List.class); if(fields == null){ fields = initClassFields(beanClazz, null); getCacheManager().putCacheObj(CACHE_NAME_CLASS_FIELDS, beanClazz.getName(), fields); } return fields; } /** * 获取class中包含指定注解的的fields * @param beanClazz * @return */ public static List<Field> getFields(Class<?> beanClazz, Class<? 
extends Annotation> annotation){ String key = S.joinWith(Cons.SEPARATOR_COMMA, beanClazz.getName(), annotation.getName()); List<Field> fields = getCacheManager().getCacheObj(CACHE_NAME_CLASS_FIELDS, key, List.class); if(fields == null){ fields = initClassFields(beanClazz, annotation); getCacheManager().putCacheObj(CACHE_NAME_CLASS_FIELDS, key, fields); } return fields; } /** * 获取class的fields * @param beanClazz * @return */ public static Map<String, Field> getFieldsMap(Class<?> beanClazz){ Map<String, Field> fieldsMap = getCacheManager().getCacheObj(CACHE_NAME_CLASS_NAME2FLDMAP, beanClazz.getName(), Map.class); if(fieldsMap == null){ List<Field> fields = getFields(beanClazz); fieldsMap = BeanUtils.convertToStringKeyObjectMap(fields, "name"); getCacheManager().putCacheObj(CACHE_NAME_CLASS_NAME2FLDMAP, beanClazz.getName(), fieldsMap); } return fieldsMap; } /** * 初始化bean的属性缓存 * @param beanClazz * @return */ private static PropInfo initPropInfoCache(Class<?> beanClazz) { PropInfo propInfoCache = new PropInfo(beanClazz); getCacheManager().putCacheObj(CACHE_NAME_CLASS_PROP, beanClazz.getName(), propInfoCache); return propInfoCache; } /** * 初始化fields * @param beanClazz * @return */ private static List<Field> initClassFields(Class<?> beanClazz, Class<? extends Annotation> annotation){ List<Field> fieldList = new ArrayList<>(); Set<String> fieldNameSet = new HashSet<>(); loopFindFields(beanClazz, annotation, fieldList, fieldNameSet); return fieldList; } /** * 循环向上查找fields * @param beanClazz * @param annotation * @param fieldList * @param fieldNameSet */ private static void loopFindFields(Class<?> beanClazz, Class<? 
extends Annotation> annotation, List<Field> fieldList, Set<String> fieldNameSet){ if(beanClazz == null) { return; } Field[] fields = beanClazz.getDeclaredFields(); if (V.notEmpty(fields)) { for (Field field : fields) { // 被重写属性,以子类的为准 if (!fieldNameSet.add(field.getName())) { continue; } if (annotation == null || field.getAnnotation(annotation) != null) { fieldList.add(field); } } } loopFindFields(beanClazz.getSuperclass(), annotation, fieldList, fieldNameSet); } }
6,304
4,012
/* * Copyright (c) 2021, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include "join_common_utils.hpp" #include <cudf/ast/expressions.hpp> #include <cudf/table/table_view.hpp> #include <rmm/cuda_stream_view.hpp> #include <rmm/mr/device/per_device_resource.hpp> #include <optional> namespace cudf { namespace detail { /** * @brief Computes the join operation between two tables and returns the * output indices of left and right table as a combined table * * @param left Table of left columns to join * @param right Table of right columns to join * tables have been flipped, meaning the output indices should also be flipped * @param JoinKind The type of join to be performed * @param stream CUDA stream used for device memory operations and kernel launches * * @return Join output indices vector pair */ std::pair<std::unique_ptr<rmm::device_uvector<size_type>>, std::unique_ptr<rmm::device_uvector<size_type>>> conditional_join(table_view const& left, table_view const& right, ast::expression const& binary_predicate, join_kind JoinKind, std::optional<std::size_t> output_size = {}, rmm::cuda_stream_view stream = rmm::cuda_stream_default, rmm::mr::device_memory_resource* mr = rmm::mr::get_current_device_resource()); /** * @brief Computes the size of a join operation between two tables without * materializing the result and returns the total size value. 
* * @param left Table of left columns to join * @param right Table of right columns to join * tables have been flipped, meaning the output indices should also be flipped * @param JoinKind The type of join to be performed * @param stream CUDA stream used for device memory operations and kernel launches * * @return Join output indices vector pair */ std::size_t compute_conditional_join_output_size( table_view const& left, table_view const& right, ast::expression const& binary_predicate, join_kind JoinKind, rmm::cuda_stream_view stream = rmm::cuda_stream_default, rmm::mr::device_memory_resource* mr = rmm::mr::get_current_device_resource()); } // namespace detail } // namespace cudf
898
511
/* **************************************************************** * * Copyright 2014 Samsung Electronics All Rights Reserved. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************/ /** * @file * This file contains the APIs for BT LE communications. */ #ifndef CA_LE_UTILS_H_ #define CA_LE_UTILS_H_ #include "cacommon.h" #include "cathreadpool.h" #include "cagattservice.h" #include "uarraylist.h" #include "jni.h" #ifdef __cplusplus extern "C" { #endif #define CA_LE_AUTO_CONNECT_FLAG 1 #define CA_LE_CONNECTION_STATE 2 #define CA_LE_SEND_STATE 3 #define CA_LE_DESCRIPTOR_FOUND 4 /* Service UUID */ static const char OIC_GATT_SERVICE_UUID[] = CA_GATT_SERVICE_UUID; static const char OIC_GATT_CHARACTERISTIC_REQUEST_UUID[] = CA_GATT_REQUEST_CHRC_UUID; static const char OIC_GATT_CHARACTERISTIC_RESPONSE_UUID[] = CA_GATT_RESPONSE_CHRC_UUID; static const char OIC_GATT_CHARACTERISTIC_CONFIG_UUID[] = "00002902-0000-1000-8000-00805f9b34fb"; static const char CLASSPATH_BT_PROFILE[] = "android/bluetooth/BluetoothProfile"; static const char CLASSPATH_BT_GATT[] = "android/bluetooth/BluetoothGatt"; static const char CLASSPATH_BT_ADAPTER[] = "android/bluetooth/BluetoothAdapter"; static const char CLASSPATH_BT_DEVICE[] = "android/bluetooth/BluetoothDevice"; static const char CLASSPATH_BT_UUID[] = "java/util/UUID"; static const char CLASSPATH_LE_SCANNER[] = "android/bluetooth/le/BluetoothLeScanner"; static const char 
CLASSPATH_LE_SCANSETTINGS[] = "android/bluetooth/le/ScanSettings"; static const char METHODID_OBJECTNONPARAM[] = "()Landroid/bluetooth/BluetoothAdapter;"; static const char METHODID_BT_DEVICE[] = "()Landroid/bluetooth/BluetoothDevice;"; static const char METHODID_BT_REMOTE_DEVICE[] = "(Ljava/lang/String;)Landroid/bluetooth/BluetoothDevice;"; static const jint GATT_PROFILE = 7; static const jint GATT_SUCCESS = 0; static const jint BOND_BONDED = 12; static const jint BOND_BONDING = 11; static const jint BOND_NONE = 10; static const uint16_t STATE_CONNECTED = 3; static const uint16_t STATE_SERVICE_CONNECTED = 2; static const uint16_t STATE_DISCONNECTED = 1; static const uint16_t GATT_ERROR = 133; static const uint16_t STATE_SEND_NONE = 1; static const uint16_t STATE_SEND_SUCCESS = 2; static const uint16_t STATE_SEND_FAIL = 3; static const uint16_t STATE_SENDING = 4; static const uint16_t STATE_SEND_PREPARING = 5; static const uint16_t STATE_SEND_MTU_NEGO_SUCCESS = 6; /** * get uuid(jni object) from uuid(character). * @param[in] env JNI interface pointer. * @param[in] uuid uuid(character). * @return uuid(jni object). */ jobject CALEGetUuidFromString(JNIEnv *env, const char* uuid); /** * get parcel uuid object. * @param[in] env JNI interface pointer. * @param[in] uuid uuid (jni object). * @return parcel uuid object. */ jobject CALEGetParcelUuid(JNIEnv *env, jobject uuid); /** * get parcel uuid object from uuid string value. * @param[in] env JNI interface pointer. * @param[in] uuid uuid (const char*). * @return parcel uuid object. */ jobject CALEGetParcelUuidFromString(JNIEnv *env, const char* uuid); /** * get address from a local device. * @param[in] env JNI interface pointer. * @return local address. */ jstring CALEGetLocalDeviceAddress(JNIEnv *env); /** * get bonded list. * @param[in] env JNI interface pointer. * @return bonded list. */ jobjectArray CALEGetBondedDevices(JNIEnv *env); /** * get constants information of bluetooth state-on. 
* @param[in] env JNI interface pointer. * @return constants information of bluetooth state-on. */ jint CALEGetBTStateOnInfo(JNIEnv *env); /** * check this device can be supported as BLE client or server. * @param[in] env JNI interface pointer. * @param[in] level Android API Level to support. * @return ::CA_STATUS_OK or ERROR CODES (::CAResult_t error codes in cacommon.h). */ CAResult_t CALECheckPlatformVersion(JNIEnv *env, uint16_t level); /** * get constants information of android.os.Build.VERSION.SDK_INT. * @param[in] env JNI interface pointer. * @return constants information of android.os.Build.VERSION.SDK_INT. */ jint CALEGetBuildVersion(JNIEnv *env); /** * get constants information of android.os.Build.VERSION_CODES.[VersionName]. * @param[in] env JNI interface pointer. * @param[in] versionName version name (.., KITKAT, LOLLIPOP, ..). * @return constants information of android.os.Build.VERSION_CODES.[VersionName]. */ jint CALEGetBuildVersionCodeForName(JNIEnv *env, const char* versionName); /** * get bluetooth adapter state information. * @param[in] env JNI interface pointer. * @return JNI_TRUE if the local adapter is turned on. */ jboolean CALEIsEnableBTAdapter(JNIEnv *env); /** * get address from remote device. * @param[in] env JNI interface pointer. * @param[in] bluetoothDevice bluetooth device object. * @return remote address. */ jstring CALEGetAddressFromBTDevice(JNIEnv *env, jobject bluetoothDevice); /** * get value from selected constants. * @param[in] env JNI interface pointer. * @param[in] classType class type * @param[in] name constants name to get. * @return remote address. */ jint CALEGetConstantsValue(JNIEnv *env, const char* classType, const char* name); /** * get bluetooth device object from bluetooth adapter. * @param[in] env JNI interface pointer. * @param[in] address bluetooth address. * @return bluetooth device object. */ jobject CALEGetRemoteDevice(JNIEnv *env, jstring address); /** * get address from gatt profile object. 
* @param[in] env JNI interface pointer. * @param[in] gatt gatt profile object. * @return LE address. */ jstring CALEGetAddressFromGatt(JNIEnv *env, jobject gatt); #ifdef __cplusplus } /* extern "C" */ #endif #endif /* CA_LE_UTILS_H_ */
2,502
1,444
<filename>Mage.Tests/src/test/java/org/mage/test/cards/copy/HelmOfTheHostTest.java package org.mage.test.cards.copy; import mage.constants.PhaseStep; import mage.constants.Zone; import mage.counters.CounterType; import org.junit.Test; import org.mage.test.serverside.base.CardTestPlayerBase; /** * * @author LevelX2 */ public class HelmOfTheHostTest extends CardTestPlayerBase { /** * If you animate Gideon of the Trials and equip it with Helm of the Host * the nonlegendary copies can't become creatures with the 0 ability. You * can activate it just fine (and it gets put on the stack) but nothing * happens and you can't use another ability. */ @Test public void testCopyPlaneswalker() { addCard(Zone.BATTLEFIELD, playerA, "Mountain", 5); //Starting Loyalty: 3 // +1: Until your next turn, prevent all damage target permanent would deal. // 0: Until end of turn, Gideon of the Trials becomes a 4/4 Human Soldier creature with indestructible that's still a planeswalker. Prevent all damage that would be dealt to him this turn. // 0: You get an emblem with "As long as you control a Gideon planeswalker, you can't lose the game and your opponent can't win the game." addCard(Zone.BATTLEFIELD, playerA, "Gideon of the Trials", 1); // At the beginning of combat on your turn, create a token that's a copy of equipped creature, except the token isn't legendary if equipped creature is legendary. That token gains haste. 
// Equip {5} addCard(Zone.BATTLEFIELD, playerA, "Helm of the Host", 1); activateAbility(1, PhaseStep.PRECOMBAT_MAIN, playerA, "0: Until end of turn"); activateAbility(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Equip"); activateAbility(3, PhaseStep.PRECOMBAT_MAIN, playerA, "0: Until end of turn"); activateAbility(3, PhaseStep.PRECOMBAT_MAIN, playerA, "0: Until end of turn"); attack(3, playerA, "Gideon of the Trials"); attack(3, playerA, "Gideon of the Trials"); setStopAt(4, PhaseStep.PRECOMBAT_MAIN); execute(); assertPermanentCount(playerA, "Gideon of the Trials", 2); assertCounterCount("Gideon of the Trials", CounterType.LOYALTY, 3); assertLife(playerB, 12); assertLife(playerA, 20); } }
822
1,178
# Copyright 2020 Makani Technologies LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Constant values used by the Google Client API. The API relies on a whole bunch of strings, the documentation of which is fragmented. This module reduces the need to clutter other code with string literals and serves as a central reference point. """ API_VERSION = 'v1' GCE_API_URL = 'https://www.googleapis.com/compute/%s/projects' % API_VERSION # Used by GCE instances to obtain access tokens for OAuth2 authentication. # In general, the URI is of the form <metadata_server>/<service_account>/token. GCE_TOKEN_URI = ('http://metadata/computeMetadata/v1/instance/service-accounts' '/default/token') # OAuth2 uses authorization scopes to control access to resources. The list # below is only a small subset of all scopes. # # See: # https://developers.google.com/compute/docs/api/how-tos/authorization _SCOPE_NAMES = [ 'compute', 'compute.readonly', 'devstorage.full_control', 'devstorage.read_only', 'devstorage.read_write', 'devstorage.write_only', 'gerritcodereview', ] SCOPES = {n: 'https://www.googleapis.com/auth/' + n for n in _SCOPE_NAMES} # See https://developers.google.com/compute/docs/zones. ZONES = [ 'asia-east1-a', 'asia-east1-b', 'asia-east1-c', 'europe-west1-a', 'europe-west1-b', 'us-central1-a', 'us-central1-b', 'us-central1-f', ] # See https://developers.google.com/compute/docs/machine-types. 
MACHINE_TYPES = [ 'f1-micro', 'g1-small', 'n1-highcpu-2', 'n1-highcpu-4', 'n1-highcpu-8', 'n1-highcpu-16', 'n1-highmem-2', 'n1-highmem-4', 'n1-highmem-8', 'n1-highmem-16', 'n1-standard-1', 'n1-standard-2', 'n1-standard-4', 'n1-standard-8', 'n1-standard-16', ] # Client ID for OAuth2. This identifies locally-stored credentials. Each client # ID requires separate approval. OAUTH2_CLIENT_ID = 'makani-app' # Some services require a user agent. This is strictly for identification and, # AFAICT, we can choose anything we like. USER_AGENT = 'makani-user-agent/0.1'
975
1,514
<reponame>tris790/SDL_mixer /******************************************************************** * * * THIS FILE IS PART OF THE OggVorbis SOFTWARE CODEC SOURCE CODE. * * USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS * * GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE * * IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. * * * * THE OggVorbis SOURCE CODE IS (C) COPYRIGHT 1994-2014 * * by the Xiph.Org Foundation http://www.xiph.org/ * * * ******************************************************************** function: utility functions for loading .vqh and .vqd files ********************************************************************/ #include <stdlib.h> #include <stdio.h> #include <math.h> #include <string.h> #include <errno.h> #include "bookutil.h" int _best(codebook *book, float *a, int step){ int dim=book->dim; int i,j,o; int minval=book->minval; int del=book->delta; int qv=book->quantvals; int ze=(qv>>1); int index=0; /* assumes integer/centered encoder codebook maptype 1 no more than dim 8 */ if(del!=1){ for(i=0,o=step*(dim-1);i<dim;i++,o-=step){ int v = ((int)rint(a[o])-minval+(del>>1))/del; int m = (v<ze ? ((ze-v)<<1)-1 : ((v-ze)<<1)); index = index*qv+ (m<0?0:(m>=qv?qv-1:m)); } }else{ for(i=0,o=step*(dim-1);i<dim;i++,o-=step){ int v = (int)rint(a[o])-minval; int m = (v<ze ? 
((ze-v)<<1)-1 : ((v-ze)<<1)); index = index*qv+ (m<0?0:(m>=qv?qv-1:m)); } } if(book->c->lengthlist[index]<=0){ const static_codebook *c=book->c; int best=-1; /* assumes integer/centered encoder codebook maptype 1 no more than dim 8 */ int e[8]={0,0,0,0,0,0,0,0}; int maxval = book->minval + book->delta*(book->quantvals-1); for(i=0;i<book->entries;i++){ if(c->lengthlist[i]>0){ float this=0; for(j=0;j<dim;j++){ float val=(e[j]-a[j*step]); this+=val*val; } if(best==-1 || this<best){ best=this; index=i; } } /* assumes the value patterning created by the tools in vq/ */ j=0; while(e[j]>=maxval) e[j++]=0; if(e[j]>=0) e[j]+=book->delta; e[j]= -e[j]; } } return index; } /* A few little utils for reading files */ /* read a line. Use global, persistent buffering */ static char *linebuffer=NULL; static int lbufsize=0; char *get_line(FILE *in){ long sofar=0; if(feof(in))return NULL; while(1){ int gotline=0; while(!gotline){ if(sofar+1>=lbufsize){ if(!lbufsize){ lbufsize=1024; linebuffer=_ogg_malloc(lbufsize); }else{ lbufsize*=2; linebuffer=_ogg_realloc(linebuffer,lbufsize); } } { long c=fgetc(in); switch(c){ case EOF: if(sofar==0)return(NULL); /* fallthrough correct */ case '\n': linebuffer[sofar]='\0'; gotline=1; break; default: linebuffer[sofar++]=c; linebuffer[sofar]='\0'; break; } } } if(linebuffer[0]=='#'){ sofar=0; }else{ return(linebuffer); } } } /* read the next numerical value from the given file */ static char *value_line_buff=NULL; int get_line_value(FILE *in,float *value){ char *next; if(!value_line_buff)return(-1); *value=strtod(value_line_buff, &next); if(next==value_line_buff){ value_line_buff=NULL; return(-1); }else{ value_line_buff=next; while(*value_line_buff>44)value_line_buff++; if(*value_line_buff==44)value_line_buff++; return(0); } } int get_next_value(FILE *in,float *value){ while(1){ if(get_line_value(in,value)){ value_line_buff=get_line(in); if(!value_line_buff)return(-1); }else{ return(0); } } } int get_next_ivalue(FILE *in,long *ivalue){ float value; 
int ret=get_next_value(in,&value); *ivalue=value; return(ret); } static float sequence_base=0.f; static int v_sofar=0; void reset_next_value(void){ value_line_buff=NULL; sequence_base=0.f; v_sofar=0; } char *setup_line(FILE *in){ reset_next_value(); value_line_buff=get_line(in); return(value_line_buff); } int get_vector(codebook *b,FILE *in,int start, int n,float *a){ int i; const static_codebook *c=b->c; while(1){ if(v_sofar==n || get_line_value(in,a)){ reset_next_value(); if(get_next_value(in,a)) break; for(i=0;i<start;i++){ sequence_base=*a; get_line_value(in,a); } } for(i=1;i<c->dim;i++) if(get_line_value(in,a+i)) break; if(i==c->dim){ float temp=a[c->dim-1]; for(i=0;i<c->dim;i++)a[i]-=sequence_base; if(c->q_sequencep)sequence_base=temp; v_sofar++; return(0); } sequence_base=0.f; } return(-1); } /* read lines fromt he beginning until we find one containing the specified string */ char *find_seek_to(FILE *in,char *s){ rewind(in); while(1){ char *line=get_line(in); if(line){ if(strstr(line,s)) return(line); }else return(NULL); } } /* this reads the format as written by vqbuild/latticebuild; innocent (legal) tweaking of the file that would not affect its valid header-ness will break this routine */ codebook *codebook_load(char *filename){ codebook *b=_ogg_calloc(1,sizeof(codebook)); static_codebook *c=(static_codebook *)(b->c=_ogg_calloc(1,sizeof(static_codebook))); int quant_to_read=0; FILE *in=fopen(filename,"r"); char *line; long i; if(in==NULL){ fprintf(stderr,"Couldn't open codebook %s\n",filename); exit(1); } /* find the codebook struct */ find_seek_to(in,"static const static_codebook "); /* get the major important values */ line=get_line(in); if(sscanf(line,"%ld, %ld,", &(c->dim),&(c->entries))!=2){ fprintf(stderr,"1: syntax in %s in line:\t %s",filename,line); exit(1); } line=get_line(in); line=get_line(in); if(sscanf(line,"%d, %ld, %ld, %d, %d,", &(c->maptype),&(c->q_min),&(c->q_delta),&(c->q_quant), &(c->q_sequencep))!=5){ fprintf(stderr,"1: syntax in %s 
in line:\t %s",filename,line); exit(1); } switch(c->maptype){ case 0: quant_to_read=0; break; case 1: quant_to_read=_book_maptype1_quantvals(c); break; case 2: quant_to_read=c->entries*c->dim; break; } /* load the quantized entries */ find_seek_to(in,"static const long _vq_quantlist_"); reset_next_value(); c->quantlist=_ogg_malloc(sizeof(long)*quant_to_read); for(i=0;i<quant_to_read;i++) if(get_next_ivalue(in,c->quantlist+i)){ fprintf(stderr,"out of data while reading codebook %s\n",filename); exit(1); } /* load the lengthlist */ find_seek_to(in,"_lengthlist"); reset_next_value(); c->lengthlist=_ogg_malloc(sizeof(long)*c->entries); for(i=0;i<c->entries;i++) if(get_next_ivalue(in,c->lengthlist+i)){ fprintf(stderr,"out of data while reading codebook %s\n",filename); exit(1); } /* got it all */ fclose(in); vorbis_book_init_encode(b,c); b->valuelist=_book_unquantize(c,c->entries,NULL); return(b); } void spinnit(char *s,int n){ static int p=0; static long lasttime=0; long test; struct timeval thistime; gettimeofday(&thistime,NULL); test=thistime.tv_sec*10+thistime.tv_usec/100000; if(lasttime!=test){ lasttime=test; fprintf(stderr,"%s%d ",s,n); p++;if(p>3)p=0; switch(p){ case 0: fprintf(stderr,"| \r"); break; case 1: fprintf(stderr,"/ \r"); break; case 2: fprintf(stderr,"- \r"); break; case 3: fprintf(stderr,"\\ \r"); break; } fflush(stderr); } } void build_tree_from_lengths(int vals, long *hist, long *lengths){ int i,j; long *membership=_ogg_malloc(vals*sizeof(long)); long *histsave=alloca(vals*sizeof(long)); memcpy(histsave,hist,vals*sizeof(long)); for(i=0;i<vals;i++)membership[i]=i; /* find codeword lengths */ /* much more elegant means exist. Brute force n^2, minimum thought */ for(i=vals;i>1;i--){ int first=-1,second=-1; long least=-1; spinnit("building... 
",i); /* find the two nodes to join */ for(j=0;j<vals;j++) if(least==-1 || hist[j]<=least){ least=hist[j]; first=membership[j]; } least=-1; for(j=0;j<vals;j++) if((least==-1 || hist[j]<=least) && membership[j]!=first){ least=hist[j]; second=membership[j]; } if(first==-1 || second==-1){ fprintf(stderr,"huffman fault; no free branch\n"); exit(1); } /* join them */ least=hist[first]+hist[second]; for(j=0;j<vals;j++) if(membership[j]==first || membership[j]==second){ membership[j]=first; hist[j]=least; lengths[j]++; } } for(i=0;i<vals-1;i++) if(membership[i]!=membership[i+1]){ fprintf(stderr,"huffman fault; failed to build single tree\n"); exit(1); } /* for sanity check purposes: how many bits would it have taken to encode the training set? */ { long bitsum=0; long samples=0; for(i=0;i<vals;i++){ bitsum+=(histsave[i]-1)*lengths[i]; samples+=histsave[i]-1; } if(samples){ fprintf(stderr,"\rTotal samples in training set: %ld \n",samples); fprintf(stderr,"\rTotal bits used to represent training set: %ld\n", bitsum); } } free(membership); } /* wrap build_tree_from_lengths to allow zero entries in the histogram */ void build_tree_from_lengths0(int vals, long *hist, long *lengths){ /* pack the 'sparse' hit list into a dense list, then unpack the lengths after the build */ int upper=0,i; long *lengthlist=_ogg_calloc(vals,sizeof(long)); long *newhist=alloca(vals*sizeof(long)); for(i=0;i<vals;i++) if(hist[i]>0) newhist[upper++]=hist[i]; if(upper != vals){ fprintf(stderr,"\rEliminating %d unused entries; %d entries remain\n", vals-upper,upper); } build_tree_from_lengths(upper,newhist,lengthlist); upper=0; for(i=0;i<vals;i++) if(hist[i]>0) lengths[i]=lengthlist[upper++]; else lengths[i]=0; free(lengthlist); } void write_codebook(FILE *out,char *name,const static_codebook *c){ int i,j,k; /* save the book in C header form */ /* first, the static vectors, then the book structure to tie it together. 
*/ /* quantlist */ if(c->quantlist){ long vals=(c->maptype==1?_book_maptype1_quantvals(c):c->entries*c->dim); fprintf(out,"static const long _vq_quantlist_%s[] = {\n",name); for(j=0;j<vals;j++){ fprintf(out,"\t%ld,\n",c->quantlist[j]); } fprintf(out,"};\n\n"); } /* lengthlist */ fprintf(out,"static const char _vq_lengthlist_%s[] = {\n",name); for(j=0;j<c->entries;){ fprintf(out,"\t"); for(k=0;k<16 && j<c->entries;k++,j++) fprintf(out,"%2ld,",c->lengthlist[j]); fprintf(out,"\n"); } fprintf(out,"};\n\n"); /* tie it all together */ fprintf(out,"static const static_codebook %s = {\n",name); fprintf(out,"\t%ld, %ld,\n",c->dim,c->entries); fprintf(out,"\t(char *)_vq_lengthlist_%s,\n",name); fprintf(out,"\t%d, %ld, %ld, %d, %d,\n", c->maptype,c->q_min,c->q_delta,c->q_quant,c->q_sequencep); if(c->quantlist) fprintf(out,"\t(long *)_vq_quantlist_%s,\n",name); else fprintf(out,"\tNULL,\n"); fprintf(out,"\t0\n};\n\n"); }
5,669
26,901
<reponame>lff0305/apollo
/*
 * Copyright 2021 Apollo Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.ctrip.framework.apollo.portal.entity.bo;

import com.ctrip.framework.apollo.common.dto.ReleaseDTO;
import com.ctrip.framework.apollo.portal.entity.bo.KVEntity;

import java.util.Set;

/**
 * Business object that pairs a release's base information with the set of
 * key/value entities belonging to that release.
 */
public class ReleaseBO {

  /** Base information of the release. */
  private ReleaseDTO baseInfo;

  /** Key/value entities carried by this release. */
  private Set<KVEntity> items;

  /** @return the base information of the release */
  public ReleaseDTO getBaseInfo() {
    return baseInfo;
  }

  /** @param baseInfo the base information to associate with this release */
  public void setBaseInfo(ReleaseDTO baseInfo) {
    this.baseInfo = baseInfo;
  }

  /** @return the key/value entities of this release */
  public Set<KVEntity> getItems() {
    return items;
  }

  /** @param items the key/value entities to associate with this release */
  public void setItems(Set<KVEntity> items) {
    this.items = items;
  }
}
374
2,338
// RUN: %check_clang_tidy %s cppcoreguidelines-narrowing-conversions %t \
// RUN: -- -- -target x86_64-unknown-linux -funsigned-char

// Regression test for the cppcoreguidelines-narrowing-conversions clang-tidy
// check, built with -funsigned-char so that plain 'char' is an unsigned type
// on the x86_64-unknown-linux target.  Do not reformat or rename: the
// diagnostics below pin exact column numbers and rely on [[@LINE-1]] offsets.

// Conversions whose destination is an unsigned integer type are well defined,
// so none of the assignments below may be diagnosed.
void narrow_integer_to_unsigned_integer_is_ok() {
  signed char sc;
  short s;
  int i;
  long l;
  long long ll;

  char c;
  unsigned short us;
  unsigned int ui;
  unsigned long ul;
  unsigned long long ull;

  ui = sc;
  c = s;
  c = i;
  c = l;
  c = ll;

  c = c;
  c = us;
  c = ui;
  c = ul;
  c = ull;
}

// Narrowing into a signed destination ('signed char') is flagged as
// implementation-defined; every narrowing assignment below warns.
void narrow_integer_to_signed_integer_is_not_ok() {
  signed char sc;
  short s;
  int i;
  long l;
  long long ll;

  char c;
  unsigned short us;
  unsigned int ui;
  unsigned long ul;
  unsigned long long ull;

  sc = sc;
  sc = s;
  // CHECK-MESSAGES: :[[@LINE-1]]:8: warning: narrowing conversion from 'short' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
  sc = i;
  // CHECK-MESSAGES: :[[@LINE-1]]:8: warning: narrowing conversion from 'int' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
  sc = l;
  // CHECK-MESSAGES: :[[@LINE-1]]:8: warning: narrowing conversion from 'long' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
  sc = ll;
  // CHECK-MESSAGES: :[[@LINE-1]]:8: warning: narrowing conversion from 'long long' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
  sc = c;
  // CHECK-MESSAGES: :[[@LINE-1]]:8: warning: narrowing conversion from 'char' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
  sc = us;
  // CHECK-MESSAGES: :[[@LINE-1]]:8: warning: narrowing conversion from 'unsigned short' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
  sc = ui;
  // CHECK-MESSAGES: :[[@LINE-1]]:8: warning: narrowing conversion from 'unsigned int' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
  sc = ul;
  // CHECK-MESSAGES: :[[@LINE-1]]:8: warning: narrowing conversion from 'unsigned long' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
  sc = ull;
  // CHECK-MESSAGES: :[[@LINE-1]]:8: warning: narrowing conversion from 'unsigned long long' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
}

// With -funsigned-char both 'char' and 'unsigned char' destinations are
// unsigned, so narrowing constants into them is well defined; only the
// explicitly 'signed char' destination at the end is diagnosed.
void narrow_constant_to_unsigned_integer_is_ok() {
  char c1 = -128; // unsigned dst type is well defined.
  char c2 = 127; // unsigned dst type is well defined.
  char c3 = -129; // unsigned dst type is well defined.
  char c4 = 128; // unsigned dst type is well defined.

  unsigned char uc1 = 0;
  unsigned char uc2 = 255;
  unsigned char uc3 = -1; // unsigned dst type is well defined.
  unsigned char uc4 = 256; // unsigned dst type is well defined.

  signed char sc = 128;
  // CHECK-MESSAGES: :[[@LINE-1]]:20: warning: narrowing conversion from constant value 128 (0x00000080) of type 'int' to signed type 'signed char' is implementation-defined [cppcoreguidelines-narrowing-conversions]
}

// Conditional-operator results narrowed into the (unsigned) 'char' type are
// likewise well defined and produce no diagnostics.
void narrow_conditional_operator_contant_to_unsigned_is_ok(bool b) {
  // conversion to unsigned char type is well defined.
  char c1 = b ? 1 : 0;
  char c2 = b ? 1 : 256;
  char c3 = b ? -1 : 0;
}
319
package org.openimaj.processing;

import java.util.ArrayList;
import java.util.List;

import org.openimaj.image.MBFImage;
import org.openimaj.image.processing.face.detection.DetectedFace;
import org.openimaj.image.processing.face.detection.HaarCascadeDetector;
import org.openimaj.image.processing.resize.ResizeProcessor;
import org.openimaj.math.geometry.shape.Rectangle;
import org.openimaj.video.capture.VideoCapture;
import org.openimaj.video.capture.VideoCaptureException;

import processing.core.PApplet;
import processing.core.PConstants;
import processing.core.PImage;
import processing.core.PShape;

/**
 * Bridge between the Processing environment and OpenIMAJ. Wraps webcam
 * capture, Haar-cascade face detection, and conversion between OpenIMAJ
 * {@link MBFImage} and Processing {@link PImage} representations.
 *
 * @author <NAME> (<EMAIL>)
 *
 */
public class OpenIMAJ implements PConstants {

	// Default capture resolution used by the no-argument startCapture().
	private static final int DEFAULT_WIDTH = 640;
	private static final int DEFAULT_HEIGHT = 480;

	// Parent sketch; null when the no-argument constructor is used.
	PApplet parent;
	// Current frame held for analysis (e.g. by faces()); set via updateImage()
	// or by the capture*(true) overloads.
	private MBFImage oiImage;
	private HaarCascadeDetector faceDetector;
	private VideoCapture capture;

	/**
	 * Creates the bridge and registers this object for the parent sketch's
	 * "dispose" and "pre" lifecycle callbacks.
	 *
	 * @param parent the owning Processing sketch
	 */
	public OpenIMAJ(PApplet parent) {
		this();
		this.parent = parent;
		parent.registerMethod("dispose", this);
		parent.registerMethod("pre", this);
	}

	/**
	 * Creates the bridge without a parent sketch; the face detector defaults
	 * to a minimum face size of 80 pixels.
	 */
	public OpenIMAJ() {
		faceDetector = new HaarCascadeDetector(80);
	}

	/**
	 * Initialise face detection with minimum face size
	 * @param min minimum face size in pixels
	 */
	public void initFace(int min) {
		faceDetector = new HaarCascadeDetector(min);
	}

	/**
	 * Start a video capture, default size, default device
	 */
	public void startCapture() {
		try {
			this.capture = new VideoCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT);
		} catch (VideoCaptureException e) {
			// NOTE(review): failure is silently swallowed; this.capture stays
			// null and later capture*() calls will NPE — consider logging.
		}
	}

	/**
	 * Initialise video capture on the default device
	 *
	 * @param width
	 * @param height
	 */
	public void startCapture(int width, int height) {
		try {
			this.capture = new VideoCapture(width, height);
		} catch (VideoCaptureException e) {
			// NOTE(review): silently swallowed (see startCapture()).
		}
	}

	/**
	 * Initialise video capture
	 *
	 * @param width
	 * @param height
	 * @param device index into VideoCapture.getVideoDevices()
	 */
	public void startCapture(int width, int height, int device) {
		try {
			this.capture = new VideoCapture(width, height,VideoCapture.getVideoDevices().get(device));
		} catch (VideoCaptureException e) {
			// NOTE(review): silently swallowed (see startCapture()).
		}
	}

	/**
	 * Given an initialised video capture, capture a {@link PImage}
	 * @return capture
	 */
	public PImage capturePImage() {
		MBFImage frame = this.capture.getNextFrame();
		return asPImage(frame);
	}

	/**
	 * Given an initialised video capture, capture a {@link PImage}
	 * @param setToCurrentFrame whether the current openimaj frame (for analysis) should be set from capture
	 * @return capture
	 */
	public PImage capturePImage(boolean setToCurrentFrame) {
		MBFImage frame = this.capture.getNextFrame();
		if(setToCurrentFrame){
			// Clone so later in-place processing does not mutate the returned frame.
			this.oiImage = frame.clone();
		}
		return asPImage(frame);
	}

	/**
	 * Capture an {@link MBFImage}
	 * @return the next frame from the initialised video capture
	 */
	public MBFImage capture() {
		MBFImage frame = this.capture.getNextFrame();
		return frame;
	}

	/**
	 * Capture an {@link MBFImage}, optionally retaining it as the current
	 * analysis frame.
	 *
	 * @param setToCurrentFrame whether the current openimaj frame (for analysis) should be set from capture
	 * @return the next frame from the initialised video capture
	 */
	public MBFImage capture(boolean setToCurrentFrame) {
		MBFImage frame = this.capture.getNextFrame();
		if(setToCurrentFrame){
			this.oiImage = frame.clone();
		}
		return frame;
	}

	/**
	 * Convert an OpenIMAJ {@link MBFImage} to a Processing {@link PImage}.
	 *
	 * @param frame the frame to convert
	 * @return a new PImage backed by the frame's packed ARGB pixels
	 */
	public PImage asPImage(MBFImage frame) {
		PImage img = this.parent.createImage(frame.getWidth(), frame.getHeight(), RGB);
		img.pixels = frame.toPackedARGBPixels();
		return img;
	}

	/**
	 * Processing "pre" lifecycle hook (registered in the constructor);
	 * intentionally empty.
	 */
	public void pre() {
	}

	/**
	 * Updates the OpenIMAJ held {@link MBFImage} instance from the whole parent {@link PApplet}
	 */
	public void updateImage() {
		this.parent.loadPixels();
		updateImage(this.parent.pixels,this.parent.width,this.parent.height);
	}

	/**
	 * Updates the held {@link MBFImage} from a Processing image.
	 * @param capture the image whose pixels become the analysis frame
	 */
	public void updateImage(PImage capture) {
		updateImage(capture.pixels,capture.width, capture.height);
	}

	/**
	 * Uses the given {@link MBFImage} directly as the analysis frame (no copy).
	 * @param capture the frame to hold
	 */
	public void updateImage(MBFImage capture) {
		this.oiImage = capture;
	}

	/**
	 * Updates the OpenIMAJ held {@link MBFImage} instance
	 * @param pixels the pixels to use as the MBFImage
	 * @param width the width of the image
	 * @param height the height of the image
	 */
	public void updateImage(int[] pixels,int width, int height) {
		this.oiImage = new MBFImage(pixels,width, height);
	}

	/**
	 * Processing "dispose" lifecycle hook; drops the held frame.
	 */
	public void dispose() {
		this.oiImage = null;
	}

	/**
	 * Resize the held analysis frame in place; no-op when no frame is held.
	 *
	 * @param width target width
	 * @param height target height
	 */
	public void resize(int width, int height) {
		if(this.oiImage == null) return;
		this.oiImage.processInplace(new ResizeProcessor(width, height));
	}

	/**
	 * Detect faces using {@link HaarCascadeDetector}, return an {@link ArrayList} of
	 * {@link PShape} instances. Note the {@link PShape} instances have no fill and
	 * a colour: 255,0,0
	 * @return detected faces
	 */
	public ArrayList<PShape> faces() {
		ArrayList<PShape> faces = new ArrayList<PShape>();
		// Detection runs on the flattened (greyscale) version of the held frame.
		List<DetectedFace> detected = faceDetector.detectFaces(oiImage.flatten());
		for (DetectedFace detectedFace : detected) {
			Rectangle bounds = detectedFace.getBounds();
			PShape detectedPShape = this.parent.createShape(RECT,bounds.x,bounds.y,bounds.width,bounds.height);
			detectedPShape.setFill(false);
			detectedPShape.setStroke(this.parent.color(255f, 0, 0));
			faces.add(detectedPShape);
		}
		return faces;
	}
}
1,740
2,151
<reponame>zipated/src<filename>content/renderer/service_worker/service_worker_provider_context_unittest.cc // Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/renderer/service_worker/service_worker_provider_context.h" #include <memory> #include "base/macros.h" #include "base/message_loop/message_loop.h" #include "base/run_loop.h" #include "base/test/scoped_feature_list.h" #include "content/child/thread_safe_sender.h" #include "content/common/service_worker/service_worker_container.mojom.h" #include "content/common/service_worker/service_worker_messages.h" #include "content/common/service_worker/service_worker_types.h" #include "content/public/common/content_features.h" #include "content/public/common/resource_type.h" #include "content/renderer/service_worker/controller_service_worker_connector.h" #include "content/renderer/service_worker/service_worker_provider_context.h" #include "content/renderer/service_worker/web_service_worker_impl.h" #include "content/renderer/service_worker/web_service_worker_registration_impl.h" #include "mojo/public/cpp/bindings/associated_binding_set.h" #include "net/traffic_annotation/network_traffic_annotation_test_helper.h" #include "services/network/public/cpp/features.h" #include "services/network/public/cpp/shared_url_loader_factory.h" #include "services/network/public/cpp/wrapper_shared_url_loader_factory.h" #include "services/network/public/mojom/url_loader_factory.mojom.h" #include "services/network/test/test_url_loader_client.h" #include "testing/gtest/include/gtest/gtest.h" #include "third_party/blink/public/mojom/service_worker/service_worker_error_type.mojom.h" #include "third_party/blink/public/mojom/service_worker/service_worker_object.mojom.h" #include "third_party/blink/public/mojom/service_worker/service_worker_provider_type.mojom.h" #include 
"third_party/blink/public/mojom/service_worker/service_worker_registration.mojom.h" #include "third_party/blink/public/platform/modules/serviceworker/web_service_worker_provider_client.h" #include "third_party/blink/public/platform/web_feature.mojom.h" namespace content { namespace service_worker_provider_context_unittest { class MockServiceWorkerObjectHost : public blink::mojom::ServiceWorkerObjectHost { public: explicit MockServiceWorkerObjectHost(int64_t version_id) : version_id_(version_id) {} ~MockServiceWorkerObjectHost() override = default; blink::mojom::ServiceWorkerObjectInfoPtr CreateObjectInfo() { auto info = blink::mojom::ServiceWorkerObjectInfo::New(); info->version_id = version_id_; bindings_.AddBinding(this, mojo::MakeRequest(&info->host_ptr_info)); info->request = mojo::MakeRequest(&remote_object_); return info; } int GetBindingCount() const { return bindings_.size(); } private: // Implements blink::mojom::ServiceWorkerObjectHost. void PostMessageToServiceWorker( ::blink::TransferableMessage message) override { NOTREACHED(); } void TerminateForTesting(TerminateForTestingCallback callback) override { NOTREACHED(); } const int64_t version_id_; mojo::AssociatedBindingSet<blink::mojom::ServiceWorkerObjectHost> bindings_; blink::mojom::ServiceWorkerObjectAssociatedPtr remote_object_; }; class MockServiceWorkerRegistrationObjectHost : public blink::mojom::ServiceWorkerRegistrationObjectHost { public: explicit MockServiceWorkerRegistrationObjectHost(int64_t registration_id) : registration_id_(registration_id) { bindings_.set_connection_error_handler( base::Bind(&MockServiceWorkerRegistrationObjectHost::OnConnectionError, base::Unretained(this))); } ~MockServiceWorkerRegistrationObjectHost() override = default; blink::mojom::ServiceWorkerRegistrationObjectInfoPtr CreateObjectInfo( MockServiceWorkerObjectHost* active, MockServiceWorkerObjectHost* waiting, MockServiceWorkerObjectHost* installing) { auto info = 
blink::mojom::ServiceWorkerRegistrationObjectInfo::New(); info->registration_id = registration_id_; bindings_.AddBinding(this, mojo::MakeRequest(&info->host_ptr_info)); info->request = mojo::MakeRequest(&remote_registration_); info->active = active->CreateObjectInfo(); info->waiting = waiting->CreateObjectInfo(); info->installing = installing->CreateObjectInfo(); return info; } int GetBindingCount() const { return bindings_.size(); } private: // Implements blink::mojom::ServiceWorkerRegistrationObjectHost. void Update(UpdateCallback callback) override { std::move(callback).Run(blink::mojom::ServiceWorkerErrorType::kNone, base::nullopt); } void Unregister(UnregisterCallback callback) override { std::move(callback).Run(blink::mojom::ServiceWorkerErrorType::kNone, base::nullopt); } void EnableNavigationPreload( bool enable, EnableNavigationPreloadCallback callback) override { std::move(callback).Run(blink::mojom::ServiceWorkerErrorType::kNone, base::nullopt); } void GetNavigationPreloadState( GetNavigationPreloadStateCallback callback) override { std::move(callback).Run(blink::mojom::ServiceWorkerErrorType::kNone, base::nullopt, nullptr); } void SetNavigationPreloadHeader( const std::string& value, SetNavigationPreloadHeaderCallback callback) override { std::move(callback).Run(blink::mojom::ServiceWorkerErrorType::kNone, base::nullopt); } void OnConnectionError() { // If there are still bindings, |this| is still being used. if (!bindings_.empty()) return; // Will destroy corresponding remote WebServiceWorkerRegistrationImpl // instance. 
remote_registration_.reset(); } int64_t registration_id_; mojo::AssociatedBindingSet<blink::mojom::ServiceWorkerRegistrationObjectHost> bindings_; blink::mojom::ServiceWorkerRegistrationObjectAssociatedPtr remote_registration_; }; class MockWebServiceWorkerProviderClientImpl : public blink::WebServiceWorkerProviderClient { public: MockWebServiceWorkerProviderClientImpl() {} ~MockWebServiceWorkerProviderClientImpl() override {} void SetController(std::unique_ptr<blink::WebServiceWorker::Handle> handle, bool should_notify_controller_change) override { was_set_controller_called_ = true; } void DispatchMessageEvent( std::unique_ptr<blink::WebServiceWorker::Handle> handle, blink::TransferableMessage message) override { was_dispatch_message_event_called_ = true; } void CountFeature(blink::mojom::WebFeature feature) override { used_features_.insert(feature); } bool was_set_controller_called() const { return was_set_controller_called_; } bool was_dispatch_message_event_called() const { return was_dispatch_message_event_called_; } const std::set<blink::mojom::WebFeature>& used_features() const { return used_features_; } private: bool was_set_controller_called_ = false; bool was_dispatch_message_event_called_ = false; std::set<blink::mojom::WebFeature> used_features_; }; // S13nServiceWorker: a fake URLLoaderFactory implementation that basically // does nothing but records the requests. 
// Test double for network::mojom::URLLoaderFactory: records the URL of the
// last request started through it and keeps the client pipes alive so no
// connection errors are observed by the code under test.
class FakeURLLoaderFactory final : public network::mojom::URLLoaderFactory {
 public:
  FakeURLLoaderFactory() = default;
  ~FakeURLLoaderFactory() override = default;

  // Binds an additional request pipe to this fake factory.
  void AddBinding(network::mojom::URLLoaderFactoryRequest request) {
    bindings_.AddBinding(this, std::move(request));
  }

  // network::mojom::URLLoaderFactory:
  void CreateLoaderAndStart(network::mojom::URLLoaderRequest request,
                            int32_t routing_id,
                            int32_t request_id,
                            uint32_t options,
                            const network::ResourceRequest& url_request,
                            network::mojom::URLLoaderClientPtr client,
                            const net::MutableNetworkTrafficAnnotationTag&
                                traffic_annotation) override {
    // Does nothing, but just record the request and hold the client (to avoid
    // connection errors).
    last_url_ = url_request.url;
    clients_.push_back(std::move(client));
  }
  void Clone(network::mojom::URLLoaderFactoryRequest factory) override {
    // Tests never clone this factory.
    NOTREACHED();
  }

  // Number of clients held, i.e. requests started via this factory.
  size_t clients_count() const { return clients_.size(); }
  // URL of the most recently started request.
  GURL last_request_url() const { return last_url_; }

 private:
  mojo::BindingSet<network::mojom::URLLoaderFactory> bindings_;
  // Held (never driven) so the peer does not see a connection error.
  std::vector<network::mojom::URLLoaderClientPtr> clients_;
  GURL last_url_;

  DISALLOW_COPY_AND_ASSIGN(FakeURLLoaderFactory);
};

// S13nServiceWorker: a fake ControllerServiceWorker implementation that
// basically does nothing but records DispatchFetchEvent calls.
class FakeControllerServiceWorker : public mojom::ControllerServiceWorker { public: FakeControllerServiceWorker() = default; ~FakeControllerServiceWorker() override = default; // mojom::ControllerServiceWorker: void DispatchFetchEvent( mojom::DispatchFetchEventParamsPtr params, mojom::ServiceWorkerFetchResponseCallbackPtr response_callback, DispatchFetchEventCallback callback) override { fetch_event_count_++; fetch_event_request_ = params->request; std::move(callback).Run(blink::mojom::ServiceWorkerEventStatus::COMPLETED, base::Time()); } void Clone(mojom::ControllerServiceWorkerRequest request) override { bindings_.AddBinding(this, std::move(request)); } int fetch_event_count() const { return fetch_event_count_; } const network::ResourceRequest& fetch_event_request() const { return fetch_event_request_; } private: int fetch_event_count_ = 0; network::ResourceRequest fetch_event_request_; base::OnceClosure fetch_event_callback_; mojo::BindingSet<mojom::ControllerServiceWorker> bindings_; DISALLOW_COPY_AND_ASSIGN(FakeControllerServiceWorker); }; class ServiceWorkerProviderContextTest : public testing::Test { public: ServiceWorkerProviderContextTest() = default; void EnableS13nServiceWorker() { scoped_feature_list_.InitAndEnableFeature( network::features::kNetworkService); network::mojom::URLLoaderFactoryPtr fake_loader_factory; fake_loader_factory_.AddBinding(MakeRequest(&fake_loader_factory)); loader_factory_ = base::MakeRefCounted<network::WrapperSharedURLLoaderFactory>( std::move(fake_loader_factory)); } void StartRequest(network::mojom::URLLoaderFactory* factory, const GURL& url) { network::ResourceRequest request; request.url = url; request.resource_type = static_cast<int>(RESOURCE_TYPE_SUB_RESOURCE); network::mojom::URLLoaderPtr loader; network::TestURLLoaderClient loader_client; factory->CreateLoaderAndStart( mojo::MakeRequest(&loader), 0, 0, network::mojom::kURLLoadOptionNone, request, loader_client.CreateInterfacePtr(), 
net::MutableNetworkTrafficAnnotationTag(TRAFFIC_ANNOTATION_FOR_TESTS)); // Need to run one more loop to make a Mojo call. base::RunLoop().RunUntilIdle(); } bool ContainsRegistration(ServiceWorkerProviderContext* provider_context, int64_t registration_id) { return provider_context->ContainsServiceWorkerRegistrationObjectForTesting( registration_id); } bool ContainsServiceWorker(ServiceWorkerProviderContext* provider_context, int64_t version_id) { return provider_context->ContainsServiceWorkerObjectForTesting(version_id); } protected: base::MessageLoop message_loop_; // S13nServiceWorker: base::test::ScopedFeatureList scoped_feature_list_; FakeURLLoaderFactory fake_loader_factory_; scoped_refptr<network::SharedURLLoaderFactory> loader_factory_; DISALLOW_COPY_AND_ASSIGN(ServiceWorkerProviderContextTest); }; TEST_F(ServiceWorkerProviderContextTest, SetController) { const int kProviderId = 10; { auto mock_service_worker_object_host = std::make_unique<MockServiceWorkerObjectHost>(200 /* version_id */); ASSERT_EQ(0, mock_service_worker_object_host->GetBindingCount()); blink::mojom::ServiceWorkerObjectInfoPtr object_info = mock_service_worker_object_host->CreateObjectInfo(); EXPECT_EQ(1, mock_service_worker_object_host->GetBindingCount()); // (1) In the case there is no WebSWProviderClient but SWProviderContext for // the provider, the passed reference should be adopted and owned by the // provider context. 
mojom::ServiceWorkerContainerAssociatedPtr container_ptr; mojom::ServiceWorkerContainerAssociatedRequest container_request = mojo::MakeRequestAssociatedWithDedicatedPipe(&container_ptr); auto provider_context = base::MakeRefCounted<ServiceWorkerProviderContext>( kProviderId, blink::mojom::ServiceWorkerProviderType::kForWindow, std::move(container_request), nullptr /* host_ptr_info */, nullptr /* controller_info */, nullptr /* loader_factory*/); auto info = mojom::ControllerServiceWorkerInfo::New(); info->object_info = std::move(object_info); container_ptr->SetController(std::move(info), std::vector<blink::mojom::WebFeature>(), true); base::RunLoop().RunUntilIdle(); // Destruction of the provider context should release references to the // the controller. provider_context = nullptr; base::RunLoop().RunUntilIdle(); // ServiceWorkerObjectHost Mojo connection got broken. EXPECT_EQ(0, mock_service_worker_object_host->GetBindingCount()); } { auto mock_service_worker_object_host = std::make_unique<MockServiceWorkerObjectHost>(201 /* version_id */); ASSERT_EQ(0, mock_service_worker_object_host->GetBindingCount()); blink::mojom::ServiceWorkerObjectInfoPtr object_info = mock_service_worker_object_host->CreateObjectInfo(); EXPECT_EQ(1, mock_service_worker_object_host->GetBindingCount()); // (2) In the case there are both SWProviderContext and SWProviderClient for // the provider, the passed reference should be adopted by the provider // context and then be transfered ownership to the provider client, after // that due to limitation of the mock implementation, the reference // immediately gets released. 
mojom::ServiceWorkerContainerHostAssociatedPtrInfo host_ptr_info; mojom::ServiceWorkerContainerHostAssociatedRequest host_request = mojo::MakeRequest(&host_ptr_info); mojom::ServiceWorkerContainerAssociatedPtr container_ptr; mojom::ServiceWorkerContainerAssociatedRequest container_request = mojo::MakeRequestAssociatedWithDedicatedPipe(&container_ptr); auto provider_context = base::MakeRefCounted<ServiceWorkerProviderContext>( kProviderId, blink::mojom::ServiceWorkerProviderType::kForWindow, std::move(container_request), std::move(host_ptr_info), nullptr /* controller_info */, nullptr /* loader_factory*/); auto provider_impl = std::make_unique<WebServiceWorkerProviderImpl>(provider_context.get()); auto client = std::make_unique<MockWebServiceWorkerProviderClientImpl>(); provider_impl->SetClient(client.get()); ASSERT_FALSE(client->was_set_controller_called()); auto info = mojom::ControllerServiceWorkerInfo::New(); info->object_info = std::move(object_info); container_ptr->SetController(std::move(info), std::vector<blink::mojom::WebFeature>(), true); base::RunLoop().RunUntilIdle(); EXPECT_TRUE(client->was_set_controller_called()); // ServiceWorkerObjectHost Mojo connection got broken. EXPECT_EQ(0, mock_service_worker_object_host->GetBindingCount()); } } // Test that clearing the controller by sending a nullptr object info results in // the provider context having a null controller. 
TEST_F(ServiceWorkerProviderContextTest, SetController_Null) { const int kProviderId = 10; mojom::ServiceWorkerContainerHostAssociatedPtrInfo host_ptr_info; mojom::ServiceWorkerContainerHostAssociatedRequest host_request = mojo::MakeRequest(&host_ptr_info); mojom::ServiceWorkerContainerAssociatedPtr container_ptr; mojom::ServiceWorkerContainerAssociatedRequest container_request = mojo::MakeRequestAssociatedWithDedicatedPipe(&container_ptr); auto provider_context = base::MakeRefCounted<ServiceWorkerProviderContext>( kProviderId, blink::mojom::ServiceWorkerProviderType::kForWindow, std::move(container_request), std::move(host_ptr_info), nullptr /* controller_info */, nullptr /* loader_factory*/); auto provider_impl = std::make_unique<WebServiceWorkerProviderImpl>(provider_context.get()); auto client = std::make_unique<MockWebServiceWorkerProviderClientImpl>(); provider_impl->SetClient(client.get()); container_ptr->SetController(mojom::ControllerServiceWorkerInfo::New(), std::vector<blink::mojom::WebFeature>(), true); base::RunLoop().RunUntilIdle(); EXPECT_FALSE(provider_context->TakeController()); EXPECT_TRUE(client->was_set_controller_called()); } // S13nServiceWorker: Test that SetController correctly sets (or resets) // the controller service worker for clients. TEST_F(ServiceWorkerProviderContextTest, SetControllerServiceWorker) { EnableS13nServiceWorker(); const int kProviderId = 10; // (1) Test if setting the controller via the CTOR works. 
auto object_host1 = std::make_unique<MockServiceWorkerObjectHost>(200 /* version_id */); ASSERT_EQ(0, object_host1->GetBindingCount()); blink::mojom::ServiceWorkerObjectInfoPtr object_info1 = object_host1->CreateObjectInfo(); EXPECT_EQ(1, object_host1->GetBindingCount()); FakeControllerServiceWorker fake_controller1; auto controller_info1 = mojom::ControllerServiceWorkerInfo::New(); mojom::ControllerServiceWorkerPtr controller_ptr1; fake_controller1.Clone(mojo::MakeRequest(&controller_ptr1)); controller_info1->object_info = std::move(object_info1); controller_info1->endpoint = controller_ptr1.PassInterface(); mojom::ServiceWorkerContainerAssociatedPtr container_ptr; mojom::ServiceWorkerContainerAssociatedRequest container_request = mojo::MakeRequestAssociatedWithDedicatedPipe(&container_ptr); auto provider_context = base::MakeRefCounted<ServiceWorkerProviderContext>( kProviderId, blink::mojom::ServiceWorkerProviderType::kForWindow, std::move(container_request), nullptr /* host_ptr_info */, std::move(controller_info1), loader_factory_); base::RunLoop().RunUntilIdle(); // Subresource loader factory must be available. auto* subresource_loader_factory1 = provider_context->GetSubresourceLoaderFactory(); ASSERT_NE(nullptr, subresource_loader_factory1); // Performing a request should reach the controller. const GURL kURL1("https://www.example.com/foo.png"); StartRequest(subresource_loader_factory1, kURL1); EXPECT_EQ(kURL1, fake_controller1.fetch_event_request().url); EXPECT_EQ(1, fake_controller1.fetch_event_count()); // (2) Test if resetting the controller to a new one via SetController // works. 
auto object_host2 = std::make_unique<MockServiceWorkerObjectHost>(201 /* version_id */); ASSERT_EQ(0, object_host2->GetBindingCount()); blink::mojom::ServiceWorkerObjectInfoPtr object_info2 = object_host2->CreateObjectInfo(); EXPECT_EQ(1, object_host2->GetBindingCount()); FakeControllerServiceWorker fake_controller2; auto controller_info2 = mojom::ControllerServiceWorkerInfo::New(); mojom::ControllerServiceWorkerPtr controller_ptr2; fake_controller2.Clone(mojo::MakeRequest(&controller_ptr2)); controller_info2->object_info = std::move(object_info2); controller_info2->endpoint = controller_ptr2.PassInterface(); container_ptr->SetController(std::move(controller_info2), std::vector<blink::mojom::WebFeature>(), true); // The controller is reset. References to the old controller must be // released. base::RunLoop().RunUntilIdle(); EXPECT_EQ(0, object_host1->GetBindingCount()); // Subresource loader factory must be available, and should be the same // one as we got before. auto* subresource_loader_factory2 = provider_context->GetSubresourceLoaderFactory(); ASSERT_NE(nullptr, subresource_loader_factory2); EXPECT_EQ(subresource_loader_factory1, subresource_loader_factory2); // Performing a request should reach the new controller. const GURL kURL2("https://www.example.com/foo2.png"); StartRequest(subresource_loader_factory2, kURL2); EXPECT_EQ(kURL2, fake_controller2.fetch_event_request().url); EXPECT_EQ(1, fake_controller2.fetch_event_count()); // The request should not go to the previous controller. EXPECT_EQ(1, fake_controller1.fetch_event_count()); // (3) Test if resetting the controller to nullptr works. container_ptr->SetController(mojom::ControllerServiceWorkerInfo::New(), std::vector<blink::mojom::WebFeature>(), true); // The controller is reset. References to the old controller must be // released. base::RunLoop().RunUntilIdle(); EXPECT_EQ(0, object_host2->GetBindingCount()); // Subresource loader factory must not be available. 
EXPECT_EQ(nullptr, provider_context->GetSubresourceLoaderFactory()); // Performing a request using the subresource factory obtained before // falls back to the network. const GURL kURL3("https://www.example.com/foo3.png"); EXPECT_EQ(0UL, fake_loader_factory_.clients_count()); StartRequest(subresource_loader_factory2, kURL3); EXPECT_EQ(kURL3, fake_loader_factory_.last_request_url()); EXPECT_EQ(1UL, fake_loader_factory_.clients_count()); // The request should not go to the previous controllers. EXPECT_EQ(1, fake_controller1.fetch_event_count()); EXPECT_EQ(1, fake_controller2.fetch_event_count()); // (4) Test if resetting the controller to yet another one via SetController // works. auto object_host4 = std::make_unique<MockServiceWorkerObjectHost>(202 /* version_id */); ASSERT_EQ(0, object_host4->GetBindingCount()); blink::mojom::ServiceWorkerObjectInfoPtr object_info4 = object_host4->CreateObjectInfo(); EXPECT_EQ(1, object_host4->GetBindingCount()); FakeControllerServiceWorker fake_controller4; auto controller_info4 = mojom::ControllerServiceWorkerInfo::New(); mojom::ControllerServiceWorkerPtr controller_ptr4; fake_controller4.Clone(mojo::MakeRequest(&controller_ptr4)); controller_info4->object_info = std::move(object_info4); controller_info4->endpoint = controller_ptr4.PassInterface(); container_ptr->SetController(std::move(controller_info4), std::vector<blink::mojom::WebFeature>(), true); base::RunLoop().RunUntilIdle(); // Subresource loader factory must be available. auto* subresource_loader_factory4 = provider_context->GetSubresourceLoaderFactory(); ASSERT_NE(nullptr, subresource_loader_factory4); // Performing a request should reach the new controller. const GURL kURL4("https://www.example.com/foo4.png"); StartRequest(subresource_loader_factory4, kURL4); EXPECT_EQ(kURL4, fake_controller4.fetch_event_request().url); EXPECT_EQ(1, fake_controller4.fetch_event_count()); // The request should not go to the previous controllers. 
EXPECT_EQ(1, fake_controller1.fetch_event_count()); EXPECT_EQ(1, fake_controller2.fetch_event_count()); // The request should not go to the network. EXPECT_EQ(1UL, fake_loader_factory_.clients_count()); } TEST_F(ServiceWorkerProviderContextTest, PostMessageToClient) { const int kProviderId = 10; auto mock_service_worker_object_host = std::make_unique<MockServiceWorkerObjectHost>(200 /* version_id */); ASSERT_EQ(0, mock_service_worker_object_host->GetBindingCount()); blink::mojom::ServiceWorkerObjectInfoPtr object_info = mock_service_worker_object_host->CreateObjectInfo(); EXPECT_EQ(1, mock_service_worker_object_host->GetBindingCount()); mojom::ServiceWorkerContainerHostAssociatedPtrInfo host_ptr_info; mojom::ServiceWorkerContainerHostAssociatedRequest host_request = mojo::MakeRequest(&host_ptr_info); mojom::ServiceWorkerContainerAssociatedPtr container_ptr; mojom::ServiceWorkerContainerAssociatedRequest container_request = mojo::MakeRequestAssociatedWithDedicatedPipe(&container_ptr); auto provider_context = base::MakeRefCounted<ServiceWorkerProviderContext>( kProviderId, blink::mojom::ServiceWorkerProviderType::kForWindow, std::move(container_request), std::move(host_ptr_info), nullptr /* controller_info */, nullptr /* loader_factory*/); auto provider_impl = std::make_unique<WebServiceWorkerProviderImpl>(provider_context.get()); auto client = std::make_unique<MockWebServiceWorkerProviderClientImpl>(); provider_impl->SetClient(client.get()); ASSERT_FALSE(client->was_dispatch_message_event_called()); container_ptr->PostMessageToClient(std::move(object_info), blink::TransferableMessage()); base::RunLoop().RunUntilIdle(); // The passed reference should be owned by the provider client (but the // reference is immediately released by the mock provider client). 
EXPECT_TRUE(client->was_dispatch_message_event_called()); EXPECT_EQ(0, mock_service_worker_object_host->GetBindingCount()); } TEST_F(ServiceWorkerProviderContextTest, CountFeature) { const int kProviderId = 10; mojom::ServiceWorkerContainerHostAssociatedPtrInfo host_ptr_info; mojom::ServiceWorkerContainerHostAssociatedRequest host_request = mojo::MakeRequest(&host_ptr_info); mojom::ServiceWorkerContainerAssociatedPtr container_ptr; mojom::ServiceWorkerContainerAssociatedRequest container_request = mojo::MakeRequestAssociatedWithDedicatedPipe(&container_ptr); auto provider_context = base::MakeRefCounted<ServiceWorkerProviderContext>( kProviderId, blink::mojom::ServiceWorkerProviderType::kForWindow, std::move(container_request), std::move(host_ptr_info), nullptr /* controller_info */, nullptr /* loader_factory*/); auto provider_impl = std::make_unique<WebServiceWorkerProviderImpl>(provider_context.get()); auto client = std::make_unique<MockWebServiceWorkerProviderClientImpl>(); container_ptr->CountFeature(blink::mojom::WebFeature::kWorkerStart); provider_impl->SetClient(client.get()); base::RunLoop().RunUntilIdle(); // Calling CountFeature() before client is set will save the feature usage in // the set, and once SetClient() is called it gets propagated to the client. ASSERT_EQ(1UL, client->used_features().size()); ASSERT_EQ(blink::mojom::WebFeature::kWorkerStart, *(client->used_features().begin())); container_ptr->CountFeature(blink::mojom::WebFeature::kWindowEvent); base::RunLoop().RunUntilIdle(); ASSERT_EQ(2UL, client->used_features().size()); ASSERT_EQ(blink::mojom::WebFeature::kWindowEvent, *(++(client->used_features().begin()))); } TEST_F(ServiceWorkerProviderContextTest, GetOrCreateRegistration) { scoped_refptr<WebServiceWorkerRegistrationImpl> registration1; scoped_refptr<WebServiceWorkerRegistrationImpl> registration2; // Set up ServiceWorkerProviderContext for client contexts. 
const int kProviderId = 10; auto provider_context = base::MakeRefCounted<ServiceWorkerProviderContext>( kProviderId, blink::mojom::ServiceWorkerProviderType::kForWindow, nullptr, nullptr, nullptr /* controller_info */, nullptr /* loader_factory*/); auto active_host = std::make_unique<MockServiceWorkerObjectHost>(200 /* version_id */); auto waiting_host = std::make_unique<MockServiceWorkerObjectHost>(201 /* version_id */); auto installing_host = std::make_unique<MockServiceWorkerObjectHost>(202 /* version_id */); ASSERT_EQ(0, active_host->GetBindingCount()); ASSERT_EQ(0, waiting_host->GetBindingCount()); ASSERT_EQ(0, installing_host->GetBindingCount()); const int64_t registration_id = 10; auto mock_registration_object_host = std::make_unique<MockServiceWorkerRegistrationObjectHost>( registration_id); ASSERT_EQ(0, mock_registration_object_host->GetBindingCount()); { blink::mojom::ServiceWorkerRegistrationObjectInfoPtr registration_info = mock_registration_object_host->CreateObjectInfo( active_host.get(), waiting_host.get(), installing_host.get()); // ServiceWorkerRegistrationObjectHost Mojo connection has been added. EXPECT_EQ(1, mock_registration_object_host->GetBindingCount()); // ServiceWorkerObjectHost Mojo connections have been added. EXPECT_EQ(1, active_host->GetBindingCount()); EXPECT_EQ(1, waiting_host->GetBindingCount()); EXPECT_EQ(1, installing_host->GetBindingCount()); ASSERT_FALSE(ContainsRegistration(provider_context.get(), registration_id)); // Should return a registration object newly created with adopting the // refcounts. 
registration1 = provider_context->GetOrCreateServiceWorkerRegistrationObject( std::move(registration_info)); EXPECT_TRUE(registration1); EXPECT_TRUE(ContainsRegistration(provider_context.get(), registration_id)); EXPECT_EQ(registration_id, registration1->RegistrationId()); EXPECT_EQ(1, mock_registration_object_host->GetBindingCount()); } { blink::mojom::ServiceWorkerRegistrationObjectInfoPtr registration_info = mock_registration_object_host->CreateObjectInfo( active_host.get(), waiting_host.get(), installing_host.get()); // ServiceWorkerRegistrationObjectHost Mojo connection has been added. EXPECT_EQ(2, mock_registration_object_host->GetBindingCount()); // ServiceWorkerObjectHost Mojo connections have been added. EXPECT_EQ(2, active_host->GetBindingCount()); EXPECT_EQ(2, waiting_host->GetBindingCount()); EXPECT_EQ(2, installing_host->GetBindingCount()); // Should return the same registration object without incrementing the // refcounts. registration2 = provider_context->GetOrCreateServiceWorkerRegistrationObject( std::move(registration_info)); EXPECT_TRUE(registration2); EXPECT_EQ(registration1, registration2); base::RunLoop().RunUntilIdle(); // The 2nd ServiceWorkerRegistrationObjectHost Mojo connection has been // dropped. EXPECT_EQ(1, mock_registration_object_host->GetBindingCount()); // The corresponding ServiceWorkerObjectHost Mojo connections have been // dropped. EXPECT_EQ(1, active_host->GetBindingCount()); EXPECT_EQ(1, waiting_host->GetBindingCount()); EXPECT_EQ(1, installing_host->GetBindingCount()); } // The registration dtor decrements the refcounts. registration1 = nullptr; registration2 = nullptr; base::RunLoop().RunUntilIdle(); EXPECT_FALSE(ContainsRegistration(provider_context.get(), registration_id)); // The 1st ServiceWorkerRegistrationObjectHost Mojo connection got broken. EXPECT_EQ(0, mock_registration_object_host->GetBindingCount()); // The corresponding ServiceWorkerObjectHost Mojo connections got broken. 
EXPECT_EQ(0, active_host->GetBindingCount()); EXPECT_EQ(0, waiting_host->GetBindingCount()); EXPECT_EQ(0, installing_host->GetBindingCount()); } TEST_F(ServiceWorkerProviderContextTest, GetOrCreateServiceWorker) { scoped_refptr<WebServiceWorkerImpl> worker1; scoped_refptr<WebServiceWorkerImpl> worker2; // Set up ServiceWorkerProviderContext for client contexts. const int kProviderId = 10; auto provider_context = base::MakeRefCounted<ServiceWorkerProviderContext>( kProviderId, blink::mojom::ServiceWorkerProviderType::kForWindow, nullptr, nullptr, nullptr /* controller_info */, nullptr /* loader_factory*/); const int64_t version_id = 200; auto mock_service_worker_object_host = std::make_unique<MockServiceWorkerObjectHost>(version_id); ASSERT_EQ(0, mock_service_worker_object_host->GetBindingCount()); // Should return a worker object newly created with the 1st given |info|. { blink::mojom::ServiceWorkerObjectInfoPtr info = mock_service_worker_object_host->CreateObjectInfo(); // ServiceWorkerObjectHost Mojo connection has been added. EXPECT_EQ(1, mock_service_worker_object_host->GetBindingCount()); ASSERT_FALSE(ContainsServiceWorker(provider_context.get(), version_id)); worker1 = provider_context->GetOrCreateServiceWorkerObject(std::move(info)); EXPECT_TRUE(worker1); EXPECT_TRUE(ContainsServiceWorker(provider_context.get(), version_id)); // |worker1| is holding the 1st blink::mojom::ServiceWorkerObjectHost Mojo // connection to |mock_service_worker_object_host|. EXPECT_EQ(1, mock_service_worker_object_host->GetBindingCount()); } // Should return the same worker object and release the 2nd given |info|. 
{ blink::mojom::ServiceWorkerObjectInfoPtr info = mock_service_worker_object_host->CreateObjectInfo(); EXPECT_EQ(2, mock_service_worker_object_host->GetBindingCount()); worker2 = provider_context->GetOrCreateServiceWorkerObject(std::move(info)); EXPECT_EQ(worker1, worker2); base::RunLoop().RunUntilIdle(); // The 2nd ServiceWorkerObjectHost Mojo connection in |info| has been // dropped. EXPECT_EQ(1, mock_service_worker_object_host->GetBindingCount()); } // The dtor decrements the refcounts. worker1 = nullptr; worker2 = nullptr; base::RunLoop().RunUntilIdle(); EXPECT_FALSE(ContainsServiceWorker(provider_context.get(), version_id)); // The 1st ServiceWorkerObjectHost Mojo connection got broken. EXPECT_EQ(0, mock_service_worker_object_host->GetBindingCount()); // Should return nullptr when given nullptr. scoped_refptr<WebServiceWorkerImpl> invalid_worker = provider_context->GetOrCreateServiceWorkerObject(nullptr); EXPECT_FALSE(invalid_worker); } } // namespace service_worker_provider_context_unittest } // namespace content
11,672
332
package com.szmirren.vxApi.core.common;

import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Path;

/**
 * Utilities for resolving resource paths, both when running from the plain
 * file system and when running from inside a jar.
 *
 * @author <a href="http://szmirren.com">Mirren</a>
 */
public class PathUtil {
	/**
	 * Check whether this class is being executed from inside a jar.
	 *
	 * @return true when running in a jar environment
	 */
	public static boolean isJarEnv() {
		// getResource("") can return null with some class loaders; treat that
		// as "not a jar" instead of throwing a NullPointerException.
		URL url = PathUtil.class.getResource("");
		return url != null && url.getPath().contains(".jar!");
	}

	/**
	 * Check whether the given resource is being loaded from inside a jar.
	 *
	 * @param fileName
	 *          the resource name to probe
	 * @return true when the resource resolves into a jar
	 */
	public static boolean isJarEnv(String fileName) {
		// A missing resource previously caused a NullPointerException here;
		// a resource that cannot be found is simply not inside a jar.
		URL url = Thread.currentThread().getContextClassLoader().getResource(fileName);
		return url != null && url.getPath().contains(".jar!");
	}

	/**
	 * Resolve a resource name to a path string. When running inside a jar the
	 * relative name itself is returned; otherwise the absolute path of the
	 * resource is returned, or an empty string when the resource is missing.
	 *
	 * @param fileName
	 *          the resource name, must not be null
	 * @return the resolved path string (possibly empty)
	 */
	public static String getPathString(String fileName) {
		if (fileName == null) {
			throw new NullPointerException("文件名字不能为空");
		}
		URL path = Thread.currentThread().getContextClassLoader().getResource(fileName);
		if (path != null && path.getPath().contains(".jar!")) {
			// Inside a jar there is no usable file-system path; keep the
			// relative name so callers can still open it as a resource.
			return fileName;
		} else {
			return path == null ? "" : path.getPath();
		}
	}

	/**
	 * Resolve a resource name to a {@link Path}.
	 *
	 * @param fileName
	 *          the resource name
	 * @return the resolved path (may point at a non-existing file when the
	 *         resource could not be found)
	 */
	public static Path getPath(String fileName) {
		File file = new File(PathUtil.getPathString(fileName));
		return file.toPath();
	}

	/**
	 * Open the named resource as a stream via the context class loader.
	 *
	 * @param fileName
	 *          the resource name, must not be null
	 * @return the resource stream, or null when the resource does not exist
	 */
	public static InputStream getStream(String fileName) {
		if (fileName == null) {
			throw new NullPointerException("文件名字不能为空");
		}
		return Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName);
	}
}
835
370
<filename>toggle/src/main/java/com/github/angads25/toggle/interfaces/OnStateChangedListener.java<gh_stars>100-1000 /* * Copyright (C) 2018 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.angads25.toggle.interfaces; import android.view.View; /** * <p> * Created by <NAME> on 25/2/18. * </p> * * Interface definition for a callback to be invoked when state of switch is changed. * * <p>This is a <a href="package-summary.html">event listener</a> * whose event method is {@link #onStateChanged(View, int)}. * * @since 1.1.0 */ public interface OnStateChangedListener { /** * Called when a view changes it's state. * * @param view The view whose state was changed. * @param state The state of the view. */ void onStateChanged(View view, int state); }
410
2,859
<gh_stars>1000+ /* * FXGL - JavaFX Game Library. The MIT License (MIT). * Copyright (c) AlmasB (<EMAIL>). * See LICENSE for details. */ /** * @author <NAME> (<EMAIL>) */ module com.almasb.fxgl.test { requires javafx.graphics; requires org.junit.jupiter.api; exports com.almasb.fxgl.test; }
124
1,414
#ifdef SMPCORE_CPP

// SPC700 (SMP) ALU helpers. Each op_* performs one arithmetic/logic
// operation, updates the processor status flags in regs.p as a side effect,
// and returns the result. The compare and store variants return the first
// operand unmodified, mirroring the hardware behavior of CMP/MOV-to-memory.

// Add with carry. Sets N, V (signed overflow), H (half-carry out of bit 3,
// used by decimal adjust), Z and C.
uint8 SMPcore::op_adc(uint8 x, uint8 y) {
  int r = x + y + regs.p.c;
  regs.p.n = r & 0x80;
  regs.p.v = ~(x ^ y) & (x ^ r) & 0x80;
  regs.p.h = (x ^ y ^ r) & 0x10;
  regs.p.z = (uint8)r == 0;
  regs.p.c = r > 0xff;
  return r;
}

// Bitwise AND. Sets N and Z.
uint8 SMPcore::op_and(uint8 x, uint8 y) {
  x &= y;
  regs.p.n = x & 0x80;
  regs.p.z = x == 0;
  return x;
}

// Arithmetic shift left; bit 7 goes into C. Sets N and Z.
uint8 SMPcore::op_asl(uint8 x) {
  regs.p.c = x & 0x80;
  x <<= 1;
  regs.p.n = x & 0x80;
  regs.p.z = x == 0;
  return x;
}

// Compare: computes x - y for the flags only (C set when x >= y) and
// returns x unchanged.
uint8 SMPcore::op_cmp(uint8 x, uint8 y) {
  int r = x - y;
  regs.p.n = r & 0x80;
  regs.p.z = (uint8)r == 0;
  regs.p.c = r >= 0;
  return x;
}

// Decrement. Sets N and Z; carry is not affected.
uint8 SMPcore::op_dec(uint8 x) {
  x--;
  regs.p.n = x & 0x80;
  regs.p.z = x == 0;
  return x;
}

// Bitwise XOR. Sets N and Z.
uint8 SMPcore::op_eor(uint8 x, uint8 y) {
  x ^= y;
  regs.p.n = x & 0x80;
  regs.p.z = x == 0;
  return x;
}

// Increment. Sets N and Z; carry is not affected.
uint8 SMPcore::op_inc(uint8 x) {
  x++;
  regs.p.n = x & 0x80;
  regs.p.z = x == 0;
  return x;
}

// Load: the destination operand x is ignored; flags reflect the loaded
// value y. The unused parameter keeps a uniform two-operand signature for
// the instruction dispatch table.
uint8 SMPcore::op_ld(uint8 x, uint8 y) {
  regs.p.n = y & 0x80;
  regs.p.z = y == 0;
  return y;
}

// Logical shift right; bit 0 goes into C. N is always cleared.
uint8 SMPcore::op_lsr(uint8 x) {
  regs.p.c = x & 0x01;
  x >>= 1;
  regs.p.n = x & 0x80;
  regs.p.z = x == 0;
  return x;
}

// Bitwise OR. Sets N and Z.
uint8 SMPcore::op_or(uint8 x, uint8 y) {
  x |= y;
  regs.p.n = x & 0x80;
  regs.p.z = x == 0;
  return x;
}

// Rotate left through carry: old C enters bit 0, bit 7 becomes new C.
uint8 SMPcore::op_rol(uint8 x) {
  unsigned carry = regs.p.c << 0;
  regs.p.c = x & 0x80;
  x = (x << 1) | carry;
  regs.p.n = x & 0x80;
  regs.p.z = x == 0;
  return x;
}

// Rotate right through carry: old C enters bit 7, bit 0 becomes new C.
uint8 SMPcore::op_ror(uint8 x) {
  unsigned carry = regs.p.c << 7;
  regs.p.c = x & 0x01;
  x = carry | (x >> 1);
  regs.p.n = x & 0x80;
  regs.p.z = x == 0;
  return x;
}

// Subtract with borrow, implemented as ADC of the complement (SPC700
// semantics: C clear means borrow).
uint8 SMPcore::op_sbc(uint8 x, uint8 y) {
  return op_adc(x, ~y);
}

// Store: no flags are affected; x is ignored for signature uniformity.
uint8 SMPcore::op_st(uint8 x, uint8 y) {
  return y;
}

//

// 16-bit add (ADDW): carry is cleared first, the two halves are added via
// op_adc, and Z is recomputed over the full 16-bit result (op_adc only set
// it for the high byte).
uint16 SMPcore::op_adw(uint16 x, uint16 y) {
  uint16 r;
  regs.p.c = 0;
  r  = op_adc(x, y);
  r |= op_adc(x >> 8, y >> 8) << 8;
  regs.p.z = r == 0;
  return r;
}

// 16-bit compare (CMPW): flags only, x is returned unchanged.
uint16 SMPcore::op_cpw(uint16 x, uint16 y) {
  int r = x - y;
  regs.p.n = r & 0x8000;
  regs.p.z = (uint16)r == 0;
  regs.p.c = r >= 0;
  return x;
}

// 16-bit load (MOVW): flags reflect the loaded value; x is ignored.
uint16 SMPcore::op_ldw(uint16 x, uint16 y) {
  regs.p.n = y & 0x8000;
  regs.p.z = y == 0;
  return y;
}

// 16-bit subtract (SUBW): carry is set first (no borrow), the two halves
// are subtracted via op_sbc, and Z is recomputed over the 16-bit result.
uint16 SMPcore::op_sbw(uint16 x, uint16 y) {
  uint16 r;
  regs.p.c = 1;
  r  = op_sbc(x, y);
  r |= op_sbc(x >> 8, y >> 8) << 8;
  regs.p.z = r == 0;
  return r;
}

#endif
1,331
1,443
<reponame>cssence/mit-license { "copyright": "<NAME>, http://www.yasyf.com", "url": "http://www.yasyf.com", "gravatar": true, "theme": "double-windsor" }
72
879
<reponame>qianfei11/zstack package org.zstack.network.service.virtualrouter.eip; import javax.persistence.metamodel.SingularAttribute; import javax.persistence.metamodel.StaticMetamodel; /** */ @StaticMetamodel(VirtualRouterEipRefVO.class) public class VirtualRouterEipRefVO_ { public static volatile SingularAttribute<VirtualRouterEipRefVO, String> eipUuid; public static volatile SingularAttribute<VirtualRouterEipRefVO, String> virtualRouterVmUuid; }
163
14,668
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef ASH_KEYBOARD_UI_KEYBOARD_LAYOUT_DELEGATE_H_ #define ASH_KEYBOARD_UI_KEYBOARD_LAYOUT_DELEGATE_H_ #include <stdint.h> #include "ash/keyboard/ui/keyboard_export.h" #include "ui/events/event.h" namespace display { class Display; } namespace aura { class Window; } namespace keyboard { // A delegate class to control the virtual keyboard layout class KEYBOARD_EXPORT KeyboardLayoutDelegate { public: virtual ~KeyboardLayoutDelegate() {} // Get the container window where the virtual keyboard show appear by default. // Usually, this would be a touchable display with input focus. // This function must not return null. virtual aura::Window* GetContainerForDefaultDisplay() = 0; // Get the container window for a particular display. |display| must be valid. virtual aura::Window* GetContainerForDisplay( const display::Display& display) = 0; // Transfer a gesture event to the Ash shelf. Any remaining gestures will be // sent directly to the shelf. Used for accessing the shelf and the home // screen even when the virtual keyboard is blocking the shelf. virtual void TransferGestureEventToShelf(const ui::GestureEvent& e) = 0; }; } // namespace keyboard #endif // ASH_KEYBOARD_UI_KEYBOARD_LAYOUT_DELEGATE_H_
427
2,637
<reponame>nateglims/amazon-freertos<gh_stars>1000+ /** * \file esp_evt.c * \brief Event helper functions */ /* * Copyright (c) 2018 <NAME> * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without restriction, * including without limitation the rights to use, copy, modify, merge, * publish, distribute, sublicense, and/or sell copies of the Software, * and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. * * This file is part of ESP-AT. 
* * Author: <NAME> <<EMAIL>> */ #include "esp/esp_private.h" #include "esp/esp_evt.h" #include "esp/esp_mem.h" /** * \brief Check if reset was forced by user * \param[in] cc: Event handle * \return `1` if forced by user, `0` otherwise */ uint8_t esp_evt_reset_is_forced(esp_cb_t* cc) { return ESP_U8(!!cc->cb.reset.forced); } #if ESP_CFG_MODE_ACCESS_POINT || __DOXYGEN__ /** * \brief Get MAC address from station * \param[in] cc: Event handle * \return MAC address */ esp_mac_t * esp_evt_ap_ip_sta_get_mac(esp_cb_t* cc) { return cc->cb.ap_ip_sta.mac; } /** * \brief Get IP address from station * \param[in] cc: Event handle * \return IP address */ esp_ip_t * esp_evt_ap_ip_sta_get_ip(esp_cb_t* cc) { return cc->cb.ap_ip_sta.ip; } /** * \brief Get MAC address from connected station * \param[in] cc: Event handle * \return MAC address */ esp_mac_t * esp_evt_ap_connected_sta_get_mac(esp_cb_t* cc) { return cc->cb.ap_conn_disconn_sta.mac; } /** * \brief Get MAC address from disconnected station * \param[in] cc: Event handle * \return MAC address */ esp_mac_t * esp_evt_ap_disconnected_sta_get_mac(esp_cb_t* cc) { return cc->cb.ap_conn_disconn_sta.mac; } #endif /* ESP_CFG_MODE_ACCESS_POINT || __DOXYGEN__ */ /** * \brief Get buffer from received data * \param[in] cc: Event handle * \return Buffer handle */ esp_pbuf_p esp_evt_conn_data_recv_get_buff(esp_cb_t* cc) { return cc->cb.conn_data_recv.buff; } /** * \brief Get connection handle for receive * \param[in] cc: Event handle * \return Connection handle */ esp_conn_p esp_evt_conn_data_recv_get_conn(esp_cb_t* cc) { return cc->cb.conn_data_recv.conn; } /** * \brief Get connection handle for data sent event * \param[in] cc: Event handle * \return Connection handle */ esp_conn_p esp_evt_conn_data_sent_get_conn(esp_cb_t* cc) { return cc->cb.conn_data_sent.conn; } /** * \brief Get number of bytes sent on connection * \param[in] cc: Event handle * \return Number of bytes sent */ size_t esp_evt_conn_data_sent_get_length(esp_cb_t* cc) { return 
cc->cb.conn_data_sent.sent; } /** * \brief Get connection handle * \param[in] cc: Event handle * \return Connection handle */ esp_conn_p esp_evt_conn_data_send_err_get_conn(esp_cb_t* cc) { return cc->cb.conn_data_send_err.conn; } /** * \brief Get number of bytes successfully sent on failed send command * \param[in] cc: Event handle * \return Connection handle */ size_t esp_evt_conn_data_send_err_get_length(esp_cb_t* cc) { return cc->cb.conn_data_send_err.sent; } /** * \brief Get connection handle * \param[in] cc: Event handle * \return Connection handle */ esp_conn_p esp_evt_conn_active_get_conn(esp_cb_t* cc) { return cc->cb.conn_active_closed.conn; } /** * \brief Check if new connection is client * \param[in] cc: Event handle * \return `1` if client, `0` otherwise */ uint8_t esp_evt_conn_active_is_client(esp_cb_t* cc) { return ESP_U8(!!cc->cb.conn_active_closed.client); } /** * \brief Get connection handle * \param[in] cc: Event handle * \return Connection handle */ esp_conn_p esp_evt_conn_closed_get_conn(esp_cb_t* cc) { return cc->cb.conn_active_closed.conn; } /** * \brief Check if just closed connection was client * \param[in] cc: Event handle * \return `1` if client, `0` otherwise */ uint8_t esp_evt_conn_closed_is_client(esp_cb_t* cc) { return cc->cb.conn_active_closed.client; } /** * \brief Check if connection close even was forced by user * \param[in] cc: Event handle * \return `1` if forced, `0` otherwise */ uint8_t esp_evt_conn_closed_is_forced(esp_cb_t* cc) { return cc->cb.conn_active_closed.forced; } /** * \brief Get connection handle * \param[in] cc: Event handle * \return Connection handle */ esp_conn_p esp_evt_conn_poll_get_conn(esp_cb_t* cc) { return cc->cb.conn_poll.conn; } /** * \brief Get connection error type * \param[in] cc: Event handle * \return Member of \ref espr_t enumeration */ espr_t esp_evt_conn_error_get_error(esp_cb_t* cc) { return cc->cb.conn_error.err; } /** * \brief Get connection type * \param[in] cc: Event handle * \return Member 
of \ref espr_t enumeration */ esp_conn_type_t esp_evt_conn_error_get_type(esp_cb_t* cc) { return cc->cb.conn_error.type; } /** * \brief Get connection host * \param[in] cc: Event handle * \return Host name for connection */ const char * esp_evt_conn_error_get_host(esp_cb_t* cc) { return cc->cb.conn_error.host; } /** * \brief Get connection port * \param[in] cc: Event handle * \return Host port number */ esp_port_t esp_evt_conn_error_get_port(esp_cb_t* cc) { return cc->cb.conn_error.port; } /** * \brief Get user argument * \param[in] cc: Event handle * \return User argument */ void * esp_evt_conn_error_get_arg(esp_cb_t* cc) { return cc->cb.conn_error.arg; } #if ESP_CFG_MODE_STATION || __DOXYGEN__ /** * \brief Get command success status * \param[in] cc: Event handle * \return \ref espOK on success, member of \ref espr_t otherwise */ espr_t esp_evt_sta_list_ap_get_status(esp_cb_t* cc) { return cc->cb.sta_list_ap.status; } /** * \brief Get command success status * \param[in] cc: Event handle * \return Pointer to \ref esp_ap_t with first access point description */ esp_ap_t * esp_evt_sta_list_ap_get_aps(esp_cb_t* cc) { return cc->cb.sta_list_ap.aps; } /** * \brief Get number of access points found * \param[in] cc: Event handle * \return Number of access points found */ size_t esp_evt_sta_list_ap_get_length(esp_cb_t* cc) { return cc->cb.sta_list_ap.len; } /** * \brief Get command success status * \param[in] cc: Event handle * \return \ref espOK on success, member of \ref espr_t otherwise */ espr_t esp_evt_sta_join_ap_get_status(esp_cb_t* cc) { return cc->cb.sta_join_ap.status; } #endif /* ESP_CFG_MODE_STATION || __DOXYGEN__ */ #if ESP_CFG_DNS || __DOXYGEN__ /** * \brief Get resolve status * \param[in] cc: Event handle * \return \ref espOK on success, member of \ref espr_t otherwise */ espr_t esp_evt_dns_hostbyname_get_status(esp_cb_t* cc) { return cc->cb.dns_hostbyname.status; } /** * \brief Get hostname used to resolve IP address * \param[in] cc: Event handle * \return 
Hostname */ const char * esp_evt_dns_hostbyname_get_host(esp_cb_t* cc) { return cc->cb.dns_hostbyname.host; } /** * \brief Get IP address from DNS function * \param[in] cc: Event handle * \return IP address */ esp_ip_t * esp_evt_dns_hostbyname_get_ip(esp_cb_t* cc) { return cc->cb.dns_hostbyname.ip; } #endif /* ESP_CFG_DNS || __DOXYGEN__ */ #if ESP_CFG_PING || __DOXYGEN__ /** * \brief Get ping status * \param[in] cc: Event handle * \return \ref espOK on success, member of \ref espr_t otherwise */ espr_t esp_evt_ping_get_status(esp_cb_t* cc) { return cc->cb.ping.status; } /** * \brief Get hostname used to ping * \param[in] cc: Event handle * \return Hostname */ const char * esp_evt_ping_get_host(esp_cb_t* cc) { return cc->cb.ping.host; } /** * \brief Get time required for ping * \param[in] cc: Event handle * \return Ping time */ uint32_t esp_evt_ping_get_time(esp_cb_t* cc) { return cc->cb.ping.time; } #endif /* ESP_CFG_PING || __DOXYGEN__ */ /** * \brief Get server command result status * \param[in] cc: Event handle * \return \ref espOK on success, member of \ref espr_t otherwise */ espr_t esp_evt_server_get_status(esp_cb_t* cc) { return cc->cb.server.status; } /** * \brief Get port for server operation * \param[in] cc: Event handle * \return Server port */ esp_port_t esp_evt_server_get_port(esp_cb_t* cc) { return cc->cb.server.port; } /** * \brief Check if operation was to enable or disable server * \param[in] cc: Event handle * \return `1` if enable, `0` otherwise */ uint8_t esp_evt_server_is_enable(esp_cb_t* cc) { return cc->cb.server.en; }
4,425
4,071
/* Copyright (C) 2016-2018 Alibaba Group Holding Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef XDL_CORE_GRAPPLER_HASH_STATIS_FUSION_WORKER
#define XDL_CORE_GRAPPLER_HASH_STATIS_FUSION_WORKER

#include "xdl/core/framework/grappler.h"
#include "xdl/core/grappler/ps_fusion_worker.h"

#include <atomic>

namespace xdl {

// Graph-rewrite worker that fuses multiple hash-variable PsSparseStatisOp
// nodes of one statistics type into a single fused node.
class HashStatisFusionWorker: public FusionWorker {
 public:
  // statis_type selects which statistics ops this worker is allowed to fuse;
  // only nodes whose "statis_type" attribute matches are clustered.
  HashStatisFusionWorker(const std::string& statis_type)
    : statis_type_(statis_type) {}

  // Rewrites |graph| in place, fusing matching node clusters, and records
  // the resulting outputs in |output|.
  Status Process(GraphDef* graph, OutputSpec* output) override;

 private:
  std::string statis_type_;

  // Returns true when |n| is a PsSparseStatisOp of this worker's statistics
  // type operating on a hash variable ("hash", "hash64" or "hash128").
  // Nodes missing either attribute are rejected.
  bool NodeMatcher(NodeDef* n) {
    std::string var_type;
    if (!GetAttrValue(n, "var_type", &var_type).IsOk()) {
      return false;
    }
    std::string statis_type;
    if (!GetAttrValue(n, "statis_type", &statis_type).IsOk()) {
      return false;
    }
    return n->op == "PsSparseStatisOp" &&
        statis_type_ == statis_type &&
        (var_type == "hash128" || var_type == "hash64" || var_type == "hash");
  }

  // Refines raw clusters into the sub-clusters that are safe to fuse.
  Status PostCluster(
      const std::vector<std::set<NodeDef*> >& clusters,
      std::vector<std::set<NodeDef*> >* sub_clusters);

  // Fuses every cluster in |clusters|, one fused node per cluster.
  Status DoFusion(
      const std::vector<std::set<NodeDef*> >& clusters);

  // Fuses a single cluster of matching nodes.
  Status FuseOneCluster(
      const std::set<NodeDef*>& cluster);

  // Builds |fused_node| replacing |cluster|; itype/otype are the fused
  // node's input/output data types and |var_name_str| its variable list.
  Status FuseImpl(
      const std::string& var_name_str,
      DataType itype,
      DataType otype,
      const std::set<NodeDef*>& cluster,
      NodeDef* fused_node);
};

}

#endif
735
1,431
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.hslf.usermodel;

import org.apache.poi.hslf.record.Comment2000;
import org.apache.poi.sl.usermodel.Comment;
import org.apache.poi.util.Units;

import java.awt.geom.Point2D;
import java.util.Date;

/**
 * HSLF (PowerPoint binary format) implementation of a slide comment,
 * backed by a low-level {@link Comment2000} record.
 */
public final class HSLFComment implements Comment {
    // The underlying record; all accessors delegate to it.
    private final Comment2000 _comment2000;

    public HSLFComment(Comment2000 comment2000) {
        _comment2000 = comment2000;
    }

    /**
     * Get the backing low-level record (for internal use).
     */
    protected Comment2000 getComment2000() {
        return _comment2000;
    }

    /**
     * Get the Author of this comment
     */
    @Override
    public String getAuthor() {
        return _comment2000.getAuthor();
    }

    /**
     * Set the Author of this comment
     */
    @Override
    public void setAuthor(String author) {
        _comment2000.setAuthor(author);
    }

    /**
     * Get the Author's Initials of this comment
     */
    @Override
    public String getAuthorInitials() {
        return _comment2000.getAuthorInitials();
    }

    /**
     * Set the Author's Initials of this comment
     */
    @Override
    public void setAuthorInitials(String initials) {
        _comment2000.setAuthorInitials(initials);
    }

    /**
     * Get the text of this comment
     */
    @Override
    public String getText() {
        return _comment2000.getText();
    }

    /**
     * Set the text of this comment
     */
    @Override
    public void setText(String text) {
        _comment2000.setText(text);
    }

    /**
     * Get the date/time this comment was made.
     */
    @Override
    public Date getDate() {
        return _comment2000.getComment2000Atom().getDate();
    }

    /**
     * Set the date/time this comment was made.
     */
    @Override
    public void setDate(Date date) {
        _comment2000.getComment2000Atom().setDate(date);
    }

    /**
     * Get the position of the comment on the slide, in points.
     * The record stores offsets in master units and they are converted here.
     */
    @Override
    public Point2D getOffset() {
        final double x = Units.masterToPoints(_comment2000.getComment2000Atom().getXOffset());
        final double y = Units.masterToPoints(_comment2000.getComment2000Atom().getYOffset());
        return new Point2D.Double(x, y);
    }

    /**
     * Set the position of the comment on the slide, in points.
     * Values are converted to master units before being stored in the record.
     */
    @Override
    public void setOffset(Point2D offset) {
        final int x = Units.pointsToMaster(offset.getX());
        final int y = Units.pointsToMaster(offset.getY());
        _comment2000.getComment2000Atom().setXOffset(x);
        _comment2000.getComment2000Atom().setYOffset(y);
    }
}
1,087
404
<reponame>jmcarcell/django-plotly-dash<gh_stars>100-1000 ''' Test demo appliction Most of these tests are simply the loading of the individual files that constitute the demo. A configuration failure would cause one or more of these to fail. Copyright (c) 2018 <NAME> and others - see CONTRIBUTIONS.md Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
''' # pylint: disable=protected-access, no-member import pytest def test_asgi_loading(): 'Test loading of a module' from ..asgi import application assert application def test_wsgi_loading(): 'Test loading of a module' from ..wsgi import application assert application def test_routing_loading(): 'Test loading of a module' from ..routing import application assert application def test_url_loading(): 'Test loading of a module' from ..urls import urlpatterns assert urlpatterns def test_demo_loading(): 'Test the import and formation of a dash example app' from ..plotly_apps import app assert app._uid == 'SimpleExample' # pylint: disable=protected-access assert app.layout @pytest.mark.django_db def test_app_lookup(): 'Test looking up an existing application' from ..plotly_apps import app from django_plotly_dash.models import get_stateless_by_name, StatelessApp app2 = get_stateless_by_name(app._uid) assert app2 assert app._uid == app2._uid app3 = StatelessApp.objects.get(app_name=app._uid) assert app3 assert app3.app_name == app2._uid def test_app_callbacks(): 'Test the callbacks of the demo applications' from ..plotly_apps import app, a2, liveIn, liveOut assert app assert a2 assert liveIn assert liveOut # TODO need something to trigger callbacks def test_stateless_lookup(): 'Test side loading of stateless apps' from django_plotly_dash.util import stateless_app_lookup_hook lh_hook = stateless_app_lookup_hook() with pytest.raises(ImportError): lh_hook("not a real app name") demo_app = lh_hook('demo_app') assert demo_app is not None assert demo_app._uid == 'name_of_demo_app'
987
412
<reponame>dave1667/graphd /* Copyright 2015 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #define _GNU_SOURCE #include "libcl/clp.h" #ifdef __GLIBC__ #include <execinfo.h> #include <dlfcn.h> #define HAVE_GLIBC_BACKTRACE #define CL_INTERNAL_STACK_DEPTH 2 #endif static void render_stacktrace(cl_handle* cl, char* buffer, size_t size) { #ifdef HAVE_GLIBC_BACKTRACE size_t backtrace_n; void* backtrace_pointers[50]; size_t co; int i; int have_syminfo; void* stack_top; Dl_info symbol; stack_top = dlsym(NULL, "main"); if (!stack_top) return; /* Impressive. Send ld-linux to the hospital now */ backtrace_n = backtrace(backtrace_pointers, sizeof(backtrace_pointers) / (sizeof(void*))); co = strlen(buffer); /* * Start at two because there's at least two cl_error functions * including this one. */ for (i = CL_INTERNAL_STACK_DEPTH; i < backtrace_n; i++) { if (!backtrace_pointers[i]) break; have_syminfo = dladdr(backtrace_pointers[i], &symbol); /* * Don't crawl beyond main() into libc */ if (have_syminfo && (symbol.dli_saddr == stack_top)) return; if (i == CL_INTERNAL_STACK_DEPTH) { /* First time. Add a prompt. 
*/ if (size - co > sizeof("\nStacktrace: ")) { snprintf(buffer + co, size - co, "\nStacktrace: "); co += strlen(buffer + co); } } else { if (co < (size - 2)) { buffer[co] = ','; buffer[co + 1] = ' '; co += 2; } } if (!have_syminfo || !symbol.dli_sname) { const char* file = NULL; /* * If we found the symbol, but don't have * a name for it, use the file name and * an absolute address, if we can. * * If we got nothing at all, just use the * absolute address. */ if (have_syminfo && symbol.dli_fname) { /* * Only the last part of a file * name is interesting */ file = strrchr(symbol.dli_fname, '/'); if (!file) file = symbol.dli_fname; else file++; } snprintf(buffer + co, size - co, "%s[%p]", file ? file : "", backtrace_pointers[i]); co += strlen(buffer + co); } else { snprintf(buffer + co, size - co, "%s+%i", symbol.dli_sname, (int)(backtrace_pointers[i] - symbol.dli_saddr)); co += strlen(buffer + co); } } #endif return; } void cl_vlog_func(cl_handle* cl, cl_loglevel level, char const* func, bool entering, char const* fmt, va_list ap) { char bigbuf[16 * 1024]; va_list aq; char* buf_ptr = bigbuf; size_t buf_size = sizeof bigbuf; size_t indent = 0; if (!cl_is_logged(cl, level)) return; /* Low-level (debug and below) messages are * indented according to the current enter/leave * nesting depth. */ if (cl != NULL && !CL_IS_LOGGED(CL_LEVEL_DETAIL, level)) indent = cl->cl_indent; if (indent >= sizeof(bigbuf) / 2) indent = sizeof(bigbuf) / 2; if (indent > 0) memset(bigbuf, ' ', indent); if (func) { snprintf(buf_ptr + indent, buf_size - indent, "%c %s ", entering ? '{' : '}', func); indent += strlen(buf_ptr + indent); } for (;;) { va_copy(aq, ap); vsnprintf(buf_ptr + indent, buf_size - indent, fmt, aq); va_end(aq); buf_ptr[buf_size - 1] = '\0'; /* We fit it all in? */ if (strlen(buf_ptr) < buf_size - 1) break; /* Double the buffer size and try again. 
*/ if (buf_ptr == bigbuf) { buf_ptr = malloc(buf_size * 2); if (buf_ptr == NULL) { buf_ptr = bigbuf; strncpy(buf_ptr + buf_size - 6, "[...]", 6); break; } if (indent > 0) memcpy(buf_ptr, bigbuf, indent); } else { char* tmp = realloc(buf_ptr, buf_size * 2); if (tmp == NULL) { strncpy(buf_ptr + buf_size - 6, "[...]", 6); break; } buf_ptr = tmp; } buf_size *= 2; } if (CL_IS_LOGGED(CL_LEVEL_ERROR, level) && cl->cl_stacktrace) render_stacktrace(cl, buf_ptr, buf_size); if (cl) { (*cl->cl_write)(cl->cl_write_data, level, buf_ptr); if (cl->cl_siphon && CL_IS_LOGGED(cl->cl_siphon_level, level)) (*cl->cl_siphon)(cl->cl_siphon_data, level, buf_ptr); } else (void)fprintf(stderr, "%s\n(and by the way, your " "log handle is NULL, too.)\n", buf_ptr); if (buf_ptr != bigbuf) free(buf_ptr); } /** * @brief Log a message. * This is the explicit var-args version of cl_log(). * * @param cl a log-handle created with cl_create(). * @param level the loglevel of the message, e.g. CL_LEVEL_DEBUG. * @param fmt a printf-style format string. * @param ap va_start()ed arguments for the format string. */ void cl_vlog(cl_handle* cl, cl_loglevel level, char const* fmt, va_list ap) { cl_vlog_func(cl, level, (char*)0, 0, fmt, ap); }
2,383
32,544
<reponame>DBatOWL/tutorials package com.baeldung.application.repositories; import com.baeldung.application.entities.User; import org.springframework.data.repository.CrudRepository; import org.springframework.stereotype.Repository; import org.springframework.web.bind.annotation.CrossOrigin; @Repository public interface UserRepository extends CrudRepository<User, Long>{}
119
12,252
<gh_stars>1000+ /* * Copyright 2017 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.cluster.infinispan; import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import org.infinispan.Cache; import org.jboss.logging.Logger; import org.junit.Assert; import org.keycloak.common.util.Time; import org.keycloak.connections.infinispan.InfinispanConnectionProvider; import org.keycloak.models.sessions.infinispan.changes.SessionEntityWrapper; import org.keycloak.models.sessions.infinispan.entities.AuthenticatedClientSessionEntity; import org.keycloak.models.sessions.infinispan.entities.UserSessionEntity; import org.keycloak.models.sessions.infinispan.initializer.DistributedCacheConcurrentWritesTest; /** * @author <a href="mailto:<EMAIL>"><NAME></a> */ public class ConcurrencyDistributedRemoveSessionTest { protected static final Logger logger = Logger.getLogger(ConcurrencyJDGRemoveSessionTest.class); private static final int ITERATIONS = 10000; private static final AtomicInteger errorsCounter = new AtomicInteger(0); private static final AtomicInteger successfulListenerWrites = new AtomicInteger(0); private static final AtomicInteger successfulListenerWrites2 = new AtomicInteger(0); private static Map<String, AtomicInteger> removalCounts = new ConcurrentHashMap<>(); 
private static final UUID CLIENT_1_UUID = UUID.randomUUID(); public static void main(String[] args) throws Exception { Cache<String, SessionEntityWrapper<UserSessionEntity>> cache1 = DistributedCacheConcurrentWritesTest.createManager("node1").getCache(InfinispanConnectionProvider.USER_SESSION_CACHE_NAME); Cache<String, SessionEntityWrapper<UserSessionEntity>> cache2 = DistributedCacheConcurrentWritesTest.createManager("node2").getCache(InfinispanConnectionProvider.USER_SESSION_CACHE_NAME); // Create caches, listeners and finally worker threads Thread worker1 = createWorker(cache1, 1); Thread worker2 = createWorker(cache2, 2); Thread worker3 = createWorker(cache1, 1); Thread worker4 = createWorker(cache2, 2); // Create 100 initial sessions for (int i=0 ; i<ITERATIONS ; i++) { String sessionId = String.valueOf(i); SessionEntityWrapper<UserSessionEntity> wrappedSession = createSessionEntity(sessionId); cache1.put(sessionId, wrappedSession); removalCounts.put(sessionId, new AtomicInteger(0)); } logger.info("SESSIONS CREATED"); // Create 100 initial sessions for (int i=0 ; i<ITERATIONS ; i++) { String sessionId = String.valueOf(i); SessionEntityWrapper loadedWrapper = cache2.get(sessionId); Assert.assertNotNull("Loaded wrapper for key " + sessionId, loadedWrapper); } logger.info("SESSIONS AVAILABLE ON DC2"); long start = System.currentTimeMillis(); try { worker1.start(); worker2.start(); worker3.start(); worker4.start(); worker1.join(); worker2.join(); worker3.join(); worker4.join(); logger.info("SESSIONS REMOVED"); Map<Integer, Integer> histogram = new HashMap<>(); for (Map.Entry<String, AtomicInteger> entry : removalCounts.entrySet()) { int count = entry.getValue().get(); int current = histogram.get(count) == null ? 
0 : histogram.get(count); current++; histogram.put(count, current); } logger.infof("Histogram: %s", histogram.toString()); logger.infof("Errors: %d", errorsCounter.get()); long took = System.currentTimeMillis() - start; logger.infof("took %d ms", took); } finally { Thread.sleep(2000); // Finish JVM cache1.getCacheManager().stop(); cache2.getCacheManager().stop(); } } private static SessionEntityWrapper<UserSessionEntity> createSessionEntity(String sessionId) { // Create 100 initial sessions UserSessionEntity session = new UserSessionEntity(); session.setId(sessionId); session.setRealmId("foo"); session.setBrokerSessionId("!23123123"); session.setBrokerUserId(null); session.setUser("foo"); session.setLoginUsername("foo"); session.setIpAddress("192.168.3.11"); session.setStarted(Time.currentTime()); session.setLastSessionRefresh(Time.currentTime()); AuthenticatedClientSessionEntity clientSession = new AuthenticatedClientSessionEntity(UUID.randomUUID()); clientSession.setAuthMethod("saml"); clientSession.setAction("something"); clientSession.setTimestamp(1234); session.getAuthenticatedClientSessions().put(CLIENT_1_UUID.toString(), clientSession.getId()); SessionEntityWrapper<UserSessionEntity> wrappedSession = new SessionEntityWrapper<>(session); return wrappedSession; } private static Thread createWorker(Cache<String, SessionEntityWrapper<UserSessionEntity>> cache, int threadId) { System.out.println("Retrieved cache: " + threadId); return new CacheWorker(cache, threadId); } private static class CacheWorker extends Thread { private final Cache<String, Object> cache; private final int myThreadId; private CacheWorker(Cache cache, int myThreadId) { this.cache = cache; this.myThreadId = myThreadId; } @Override public void run() { for (int i=0 ; i<ITERATIONS ; i++) { String sessionId = String.valueOf(i); Object o = cache.remove(sessionId); if (o != null) { removalCounts.get(sessionId).incrementAndGet(); } } } } }
2,536
2,673
#!/usr/bin/env python3 import unittest import torch from gpytorch.optim import NGD from gpytorch.test.base_test_case import BaseTestCase class TestNGD(unittest.TestCase, BaseTestCase): def test_ngd_step_no_groups(self): parameters = [ torch.nn.Parameter(torch.tensor(2.5)), torch.nn.Parameter(torch.tensor([1.0, -0.5])), ] # parameters[0].grad = torch.tensor(1.) # parameters[1].grad = torch.tensor([2., -1.]) optimizer = NGD(parameters, num_data=5, lr=0.1) optimizer.zero_grad() loss = parameters[0] + torch.dot(parameters[1], torch.tensor([2.0, -1])) loss.backward() optimizer.step() self.assertAllClose(parameters[0], torch.tensor(2.0)) self.assertAllClose(parameters[1], torch.tensor([0.0, 0.0])) def test_ngd_step_groups(self): parameters = [ {"params": [torch.nn.Parameter(torch.tensor(2.5))], "lr": 0.2}, {"params": [torch.nn.Parameter(torch.tensor([1.0, -0.5]))]}, ] optimizer = NGD(parameters, num_data=5, lr=0.1) optimizer.zero_grad() loss = parameters[0]["params"][0] + torch.dot(parameters[1]["params"][0], torch.tensor([2.0, -1])) loss.backward() optimizer.step() self.assertAllClose(parameters[0]["params"][0], torch.tensor(1.5)) self.assertAllClose(parameters[1]["params"][0], torch.tensor([0.0, 0.0]))
696
1,483
/* * Copyright Lealone Database Group. * Licensed under the Server Side Public License, v 1. * Initial Developer: zhh */ package org.lealone.storage; import java.nio.ByteBuffer; import java.util.Map; import java.util.Set; import org.lealone.common.exceptions.DbException; import org.lealone.db.IDatabase; import org.lealone.db.RunMode; import org.lealone.storage.type.ObjectDataType; import org.lealone.storage.type.StorageDataType; public interface Storage { default <K, V> StorageMap<K, V> openMap(String name, Map<String, String> parameters) { return openMap(name, new ObjectDataType(), new ObjectDataType(), parameters); } <K, V> StorageMap<K, V> openMap(String name, StorageDataType keyType, StorageDataType valueType, Map<String, String> parameters); void closeMap(String name); boolean hasMap(String name); StorageMap<?, ?> getMap(String name); Set<String> getMapNames(); String nextTemporaryMapName(); String getStoragePath(); boolean isInMemory(); long getDiskSpaceUsed(); long getMemorySpaceUsed(); void save(); void drop(); void backupTo(String fileName); void close(); void closeImmediately(); boolean isClosed(); void registerEventListener(StorageEventListener listener); void unregisterEventListener(StorageEventListener listener); default void replicateFrom(ByteBuffer data) { throw DbException.getUnsupportedException("replicateFrom"); } default void scaleOut(IDatabase db, RunMode oldRunMode, RunMode newRunMode, String[] oldNodes, String[] newNodes) { throw DbException.getUnsupportedException("scaleOut"); } default void scaleIn(IDatabase db, RunMode oldRunMode, RunMode newRunMode, String[] oldNodes, String[] newNodes) { throw DbException.getUnsupportedException("scaleIn"); } }
614
1,208
// // TLMobileContactHelper.h // TLChat // // Created by 李伯坤 on 2018/1/9. // Copyright © 2018年 李伯坤. All rights reserved. // #import <Foundation/Foundation.h> #import "TLMobileContactModel.h" @interface TLMobileContactHelper : NSObject /** * 获取通讯录好友 * * @param success 获取成功,异步返回(通讯录列表,格式化的通讯录列表,格式化的通讯录列表组标题) * @param failed 获取失败 */ + (void)tryToGetAllContactsSuccess:(void (^)(NSArray *data, NSArray *formatData, NSArray *headers))success failed:(void (^)())failed; @end
307
4,268
<reponame>wenq1/duktape /* * Test error .fileName / .lineNumber blaming. * * Try to cover all the C code paths. Must test with and without tracebacks * separately, as the code paths are different. */ /*=== *** test_empty_1 (duk_safe_call) dummy_source.js 4 final top: 1 ==> rc=0, result='undefined' *** test_empty_2 (duk_safe_call) dummy.c 1234 final top: 1 ==> rc=0, result='undefined' *** test_empty_3 (duk_safe_call) dummy.c 2345 final top: 1 ==> rc=0, result='undefined' *** test_empty_4 (duk_safe_call) undefined undefined final top: 1 ==> rc=0, result='undefined' *** test_nofile_1 (duk_safe_call) delete: true dummy_source.js 4 final top: 1 ==> rc=0, result='undefined' *** test_nofile_2 (duk_safe_call) delete: true dummy.c 1234 final top: 1 ==> rc=0, result='undefined' *** test_nofile_3 (duk_safe_call) delete: true dummy.c 2345 final top: 1 ==> rc=0, result='undefined' *** test_nofile_4 (duk_safe_call) delete: true undefined undefined final top: 1 ==> rc=0, result='undefined' *** test_havefile1_1 (duk_safe_call) dummy_source.js 4 final top: 1 ==> rc=0, result='undefined' *** test_havefile1_2 (duk_safe_call) dummy.c 1234 final top: 1 ==> rc=0, result='undefined' *** test_havefile1_3 (duk_safe_call) dummy.c 2345 final top: 1 ==> rc=0, result='undefined' *** test_havefile1_4 (duk_safe_call) dummy_filename.js 2 final top: 1 ==> rc=0, result='undefined' *** test_havefile2_1 (duk_safe_call) delete: true dummy_source.js 4 final top: 1 ==> rc=0, result='undefined' *** test_havefile2_2 (duk_safe_call) delete: true dummy.c 1234 final top: 1 ==> rc=0, result='undefined' *** test_havefile2_3 (duk_safe_call) delete: true dummy.c 2345 final top: 1 ==> rc=0, result='undefined' *** test_havefile2_4 (duk_safe_call) delete: true dummy_filename.c 0 final top: 1 ==> rc=0, result='undefined' *** test_deep_1a (duk_safe_call) delete: true target depth: 9 dummy_source.js 4 final top: 1 ==> rc=0, result='undefined' *** test_deep_1b (duk_safe_call) delete: true target depth: 10 
dummy_source.js 4 final top: 1 ==> rc=0, result='undefined' *** test_deep_1c (duk_safe_call) delete: true target depth: 11 dummy_source.js 4 final top: 1 ==> rc=0, result='undefined' *** test_deep_1d (duk_safe_call) delete: true target depth: 50 dummy_source.js 4 final top: 1 ==> rc=0, result='undefined' *** test_deep_2a (duk_safe_call) delete: true target depth: 9 dummy.c 1234 final top: 1 ==> rc=0, result='undefined' *** test_deep_2b (duk_safe_call) delete: true target depth: 10 dummy.c 1234 final top: 1 ==> rc=0, result='undefined' *** test_deep_2c (duk_safe_call) delete: true target depth: 11 dummy.c 1234 final top: 1 ==> rc=0, result='undefined' *** test_deep_2d (duk_safe_call) delete: true target depth: 50 dummy.c 1234 final top: 1 ==> rc=0, result='undefined' *** test_deep_3a (duk_safe_call) delete: true target depth: 9 dummy.c 2345 final top: 1 ==> rc=0, result='undefined' *** test_deep_3b (duk_safe_call) delete: true target depth: 10 dummy.c 2345 final top: 1 ==> rc=0, result='undefined' *** test_deep_3c (duk_safe_call) delete: true target depth: 11 dummy.c 2345 final top: 1 ==> rc=0, result='undefined' *** test_deep_3d (duk_safe_call) delete: true target depth: 50 dummy.c 2345 final top: 1 ==> rc=0, result='undefined' *** test_deep_4a (duk_safe_call) delete: true target depth: 9 outer_limits.c 0 final top: 1 ==> rc=0, result='undefined' *** test_deep_4b (duk_safe_call) delete: true target depth: 10 outer_limits.c 0 final top: 1 ==> rc=0, result='undefined' *** test_deep_4c (duk_safe_call) delete: true target depth: 11 undefined undefined final top: 1 ==> rc=0, result='undefined' *** test_deep_4d (duk_safe_call) delete: true target depth: 50 undefined undefined final top: 1 ==> rc=0, result='undefined' ===*/ /* * Helpers */ static duk_c_function target_func_hack; static int depth_hack; static duk_ret_t my_thrower_1(duk_context *ctx) { /* When an error is thrown during compilation, the source file/line * is always blamed. 
*/ duk_push_string(ctx, "\n\n\nfoo="); duk_push_string(ctx, "dummy_source.js"); duk_compile(ctx, DUK_COMPILE_EVAL); duk_call(ctx, 0); return 0; } static duk_ret_t my_thrower_1_safecall(duk_context *ctx, void *udata) { (void) udata; return my_thrower_1(ctx); } static duk_ret_t my_thrower_2(duk_context *ctx) { /* When an error is thrown using duk_error(), the __FILE__ and __LINE__ * of the throw site gets blamed for the error w.r.t. to .fileName and * .lineNumber. */ #line 1234 "dummy.c" duk_error(ctx, DUK_ERR_RANGE_ERROR, "user error"); return 0; } static duk_ret_t my_thrower_2_safecall(duk_context *ctx, void *udata) { (void) udata; return my_thrower_2(ctx); } static duk_ret_t my_thrower_3(duk_context *ctx) { /* When an error is constructed using duk_push_error_object() and then * thrown, the same thing happens as with duk_error(). */ #line 2345 "dummy.c" duk_push_error_object(ctx, DUK_ERR_RANGE_ERROR, "user error"); duk_throw(ctx); return 0; } static duk_ret_t my_thrower_3_safecall(duk_context *ctx, void *udata) { (void) udata; return my_thrower_3(ctx); } static duk_ret_t my_thrower_4(duk_context *ctx) { /* When an error is thrown from inside Duktape (which is always * considered "infrastructure code") the __FILE__ and __LINE__ * are recorded in the traceback but not blamed as file/line. 
*/ duk_push_undefined(ctx); duk_require_string(ctx, -1); return 0; } static duk_ret_t my_thrower_4_safecall(duk_context *ctx, void *udata) { (void) udata; return my_thrower_4(ctx); } /* * Empty callstack */ static duk_ret_t empty_helper(duk_context *ctx, duk_safe_call_function target_func) { duk_int_t rc; rc = duk_safe_call(ctx, target_func, NULL, 0, 1); (void) rc; duk_eval_string(ctx, "(function (e) { print(e.fileName, e.lineNumber); if (PRINT_STACK) { print(e.stack); } })"); duk_dup(ctx, -2); duk_call(ctx, 1); duk_pop(ctx); printf("final top: %ld\n", (long) duk_get_top(ctx)); return 0; } static duk_ret_t test_empty_1(duk_context *ctx, void *udata) { (void) udata; return empty_helper(ctx, my_thrower_1_safecall); } static duk_ret_t test_empty_2(duk_context *ctx, void *udata) { (void) udata; return empty_helper(ctx, my_thrower_2_safecall); } static duk_ret_t test_empty_3(duk_context *ctx, void *udata) { (void) udata; return empty_helper(ctx, my_thrower_3_safecall); } static duk_ret_t test_empty_4(duk_context *ctx, void *udata) { (void) udata; return empty_helper(ctx, my_thrower_4_safecall); } /* * Callstack has entries but nothing with a .fileName. 
*/ static duk_ret_t nofile_helper_2(duk_context *ctx) { duk_push_string(ctx, "(function () {\n" " var fn = function noFileName(v) { v(); return 123; };\n" " print('delete: ' + delete fn.fileName);\n" " return fn;\n" "})()"); duk_push_string(ctx, "dummy_filename.js"); duk_compile(ctx, DUK_COMPILE_EVAL); duk_call(ctx, 0); duk_push_c_function(ctx, target_func_hack, 0); duk_call(ctx, 1); return 0; } static duk_ret_t nofile_helper(duk_context *ctx, duk_c_function target_func) { duk_int_t rc; target_func_hack = target_func; duk_push_c_function(ctx, nofile_helper_2, 0); /* Duktape/C func with no .fileName */ duk_pcall(ctx, 0); (void) rc; duk_eval_string(ctx, "(function (e) { print(e.fileName, e.lineNumber); if (PRINT_STACK) { print(e.stack); } })"); duk_dup(ctx, -2); duk_call(ctx, 1); duk_pop(ctx); printf("final top: %ld\n", (long) duk_get_top(ctx)); return 0; } static duk_ret_t test_nofile_1(duk_context *ctx, void *udata) { (void) udata; return nofile_helper(ctx, my_thrower_1); } static duk_ret_t test_nofile_2(duk_context *ctx, void *udata) { (void) udata; return nofile_helper(ctx, my_thrower_2); } static duk_ret_t test_nofile_3(duk_context *ctx, void *udata) { (void) udata; return nofile_helper(ctx, my_thrower_3); } static duk_ret_t test_nofile_4(duk_context *ctx, void *udata) { (void) udata; return nofile_helper(ctx, my_thrower_4); } /* * Callstack has entries with .fileName, but the innermost function * does not have a filename. 
*/ static duk_ret_t havefile1_helper_2(duk_context *ctx) { duk_push_string(ctx, "(function () {\n" " var fn = function haveFileName(v) { v(); return 123; };\n" " return fn;\n" "})()"); duk_push_string(ctx, "dummy_filename.js"); duk_compile(ctx, DUK_COMPILE_EVAL); duk_call(ctx, 0); duk_push_c_function(ctx, target_func_hack, 0); duk_call(ctx, 1); return 0; } static duk_ret_t havefile1_helper(duk_context *ctx, duk_c_function target_func) { duk_int_t rc; target_func_hack = target_func; duk_push_c_function(ctx, havefile1_helper_2, 0); /* Duktape/C func with no .fileName */ duk_pcall(ctx, 0); (void) rc; duk_eval_string(ctx, "(function (e) { print(e.fileName, e.lineNumber); if (PRINT_STACK) { print(e.stack); } })"); duk_dup(ctx, -2); duk_call(ctx, 1); duk_pop(ctx); printf("final top: %ld\n", (long) duk_get_top(ctx)); return 0; } static duk_ret_t test_havefile1_1(duk_context *ctx, void *udata) { (void) udata; return havefile1_helper(ctx, my_thrower_1); } static duk_ret_t test_havefile1_2(duk_context *ctx, void *udata) { (void) udata; return havefile1_helper(ctx, my_thrower_2); } static duk_ret_t test_havefile1_3(duk_context *ctx, void *udata) { (void) udata; return havefile1_helper(ctx, my_thrower_3); } static duk_ret_t test_havefile1_4(duk_context *ctx, void *udata) { (void) udata; return havefile1_helper(ctx, my_thrower_4); } /* * Callstack has entries with .fileName, and the innermost function * also has a filename. 
*/ static duk_ret_t havefile2_helper_2(duk_context *ctx) { duk_push_string(ctx, "(function () {\n" " var fn = function noFileName(v) { v(); return 123; };\n" " print('delete: ' + delete fn.fileName);\n" " return fn;\n" "})()"); duk_push_string(ctx, "dummy_filename.js"); duk_compile(ctx, DUK_COMPILE_EVAL); duk_call(ctx, 0); duk_push_c_function(ctx, target_func_hack, 0); duk_push_string(ctx, "fileName"); duk_push_string(ctx, "dummy_filename.c"); duk_def_prop(ctx, -3, DUK_DEFPROP_HAVE_VALUE | DUK_DEFPROP_SET_WRITABLE | DUK_DEFPROP_SET_CONFIGURABLE); duk_call(ctx, 1); return 0; } static duk_ret_t havefile2_helper(duk_context *ctx, duk_c_function target_func) { duk_int_t rc; target_func_hack = target_func; duk_push_c_function(ctx, havefile2_helper_2, 0); /* Duktape/C func with no .fileName */ duk_pcall(ctx, 0); (void) rc; duk_eval_string(ctx, "(function (e) { print(e.fileName, e.lineNumber); if (PRINT_STACK) { print(e.stack); } })"); duk_dup(ctx, -2); duk_call(ctx, 1); duk_pop(ctx); printf("final top: %ld\n", (long) duk_get_top(ctx)); return 0; } static duk_ret_t test_havefile2_1(duk_context *ctx, void *udata) { (void) udata; return havefile2_helper(ctx, my_thrower_1); } static duk_ret_t test_havefile2_2(duk_context *ctx, void *udata) { (void) udata; return havefile2_helper(ctx, my_thrower_2); } static duk_ret_t test_havefile2_3(duk_context *ctx, void *udata) { (void) udata; return havefile2_helper(ctx, my_thrower_3); } static duk_ret_t test_havefile2_4(duk_context *ctx, void *udata) { (void) udata; return havefile2_helper(ctx, my_thrower_4); } /* * Callstack has entries with .fileName but those entries are deeper than * the traceback depth so that they don't get blamed. * * The default callstack depth is 10, so test boundary values 9, 10, 11, * and a much deeper 50. 
*/ static duk_ret_t deep_helper_2(duk_context *ctx) { duk_push_string(ctx, "(function () {\n" " var fn = function noFileName(n, v) { if (n > 0) { noFileName(n - 1, v); } else { v(); } return 123; };\n" " print('delete: ' + delete fn.fileName);\n" " return fn;\n" "})()"); duk_push_string(ctx, "dummy_filename.js"); duk_compile(ctx, DUK_COMPILE_EVAL); duk_call(ctx, 0); printf("target depth: %d\n", (int) depth_hack); duk_push_int(ctx, depth_hack - 3); /* account for: one func already in callstack; first call into the helper; final call to target */ duk_push_c_function(ctx, target_func_hack, 0); duk_call(ctx, 2); return 0; } static duk_ret_t deep_helper(duk_context *ctx, duk_c_function target_func, int depth) { duk_int_t rc; target_func_hack = target_func; depth_hack = depth; duk_push_c_function(ctx, deep_helper_2, 0); /* Duktape/C func with .fileName */ duk_push_string(ctx, "fileName"); duk_push_string(ctx, "outer_limits.c"); duk_def_prop(ctx, -3, DUK_DEFPROP_HAVE_VALUE | DUK_DEFPROP_SET_WRITABLE | DUK_DEFPROP_SET_CONFIGURABLE); duk_pcall(ctx, 0); (void) rc; duk_eval_string(ctx, "(function (e) { print(e.fileName, e.lineNumber); if (PRINT_STACK) { print(e.stack); } })"); duk_dup(ctx, -2); duk_call(ctx, 1); duk_pop(ctx); printf("final top: %ld\n", (long) duk_get_top(ctx)); return 0; } static duk_ret_t test_deep_1a(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_1, 9); } static duk_ret_t test_deep_1b(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_1, 10); } static duk_ret_t test_deep_1c(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_1, 11); } static duk_ret_t test_deep_1d(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_1, 50); } static duk_ret_t test_deep_2a(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_2, 9); } static duk_ret_t test_deep_2b(duk_context *ctx, void *udata) { (void) udata; return 
deep_helper(ctx, my_thrower_2, 10); } static duk_ret_t test_deep_2c(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_2, 11); } static duk_ret_t test_deep_2d(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_2, 50); } static duk_ret_t test_deep_3a(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_3, 9); } static duk_ret_t test_deep_3b(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_3, 10); } static duk_ret_t test_deep_3c(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_3, 11); } static duk_ret_t test_deep_3d(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_3, 50); } static duk_ret_t test_deep_4a(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_4, 9); } static duk_ret_t test_deep_4b(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_4, 10); } static duk_ret_t test_deep_4c(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_4, 11); } static duk_ret_t test_deep_4d(duk_context *ctx, void *udata) { (void) udata; return deep_helper(ctx, my_thrower_4, 50); } /* * User code can set an explicit .fileName and .lineNumber on the error * object to avoid this default blaming. 
*/ void test(duk_context *ctx) { /* For manual testing: */ #if 0 duk_eval_string_noresult(ctx, "this.PRINT_STACK = true;"); #else duk_eval_string_noresult(ctx, "this.PRINT_STACK = false;"); #endif TEST_SAFE_CALL(test_empty_1); TEST_SAFE_CALL(test_empty_2); TEST_SAFE_CALL(test_empty_3); TEST_SAFE_CALL(test_empty_4); TEST_SAFE_CALL(test_nofile_1); TEST_SAFE_CALL(test_nofile_2); TEST_SAFE_CALL(test_nofile_3); TEST_SAFE_CALL(test_nofile_4); TEST_SAFE_CALL(test_havefile1_1); TEST_SAFE_CALL(test_havefile1_2); TEST_SAFE_CALL(test_havefile1_3); TEST_SAFE_CALL(test_havefile1_4); TEST_SAFE_CALL(test_havefile2_1); TEST_SAFE_CALL(test_havefile2_2); TEST_SAFE_CALL(test_havefile2_3); TEST_SAFE_CALL(test_havefile2_4); TEST_SAFE_CALL(test_deep_1a); TEST_SAFE_CALL(test_deep_1b); TEST_SAFE_CALL(test_deep_1c); TEST_SAFE_CALL(test_deep_1d); TEST_SAFE_CALL(test_deep_2a); TEST_SAFE_CALL(test_deep_2b); TEST_SAFE_CALL(test_deep_2c); TEST_SAFE_CALL(test_deep_2d); TEST_SAFE_CALL(test_deep_3a); TEST_SAFE_CALL(test_deep_3b); TEST_SAFE_CALL(test_deep_3c); TEST_SAFE_CALL(test_deep_3d); TEST_SAFE_CALL(test_deep_4a); TEST_SAFE_CALL(test_deep_4b); TEST_SAFE_CALL(test_deep_4c); TEST_SAFE_CALL(test_deep_4d); }
7,117
335
{ "word": "Wooden", "definitions": [ "Made of wood.", "Like or characteristic of wood.", "Stiff and awkward in movement or manner." ], "parts-of-speech": "Adjective" }
90
1,167
/**************************************************************************** * Copyright (c) 2012 Free Software Foundation, Inc. * * * * Permission is hereby granted, free of charge, to any person obtaining a * * copy of this software and associated documentation files (the * * "Software"), to deal in the Software without restriction, including * * without limitation the rights to use, copy, modify, merge, publish, * * distribute, distribute with modifications, sublicense, and/or sell * * copies of the Software, and to permit persons to whom the Software is * * furnished to do so, subject to the following conditions: * * * * The above copyright notice and this permission notice shall be included * * in all copies or substantial portions of the Software. * * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * * IN NO EVENT SHALL THE ABOVE COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR * * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR * * THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * * * Except as contained in this notice, the name(s) of the above copyright * * holders shall not be used in advertising or otherwise to promote the * * sale, use or other dealings in this Software without prior written * * authorization. * ****************************************************************************/ #ifndef __WIDECHARS_H #define __WIDECHARS_H 1 #include <test.priv.h> #if USE_WIDEC_SUPPORT #if defined(__MINGW32__) /* * MinGW has wide-character functions, but they do not work correctly. 
*/ extern int _nc_mbtowc(wchar_t *pwc, const char *s, size_t n); extern int __MINGW_NOTHROW _nc_mbtowc(wchar_t *pwc, const char *s, size_t n); #define mbtowc(pwc,s,n) _nc_mbtowc(pwc,s,n) extern int __MINGW_NOTHROW _nc_mblen(const char *, size_t); #define mblen(s,n) _nc_mblen(s, n) #endif /* __MINGW32__ */ #if HAVE_MBTOWC && HAVE_MBLEN #define reset_mbytes(state) IGNORE_RC(mblen(NULL, 0)), IGNORE_RC(mbtowc(NULL, NULL, 0)) #define count_mbytes(buffer,length,state) mblen(buffer,length) #define check_mbytes(wch,buffer,length,state) \ (int) mbtowc(&wch, buffer, length) #define state_unused #elif HAVE_MBRTOWC && HAVE_MBRLEN #define reset_mbytes(state) init_mb(state) #define count_mbytes(buffer,length,state) mbrlen(buffer,length,&state) #define check_mbytes(wch,buffer,length,state) \ (int) mbrtowc(&wch, buffer, length, &state) #else make an error #endif #else #endif /* USE_WIDEC_SUPPORT */ extern void widechars_stub(void); #endif /* __WIDECHARS_H */
1,397
2,856
# Copyright 2021 by <NAME>. All rights reserved.
#
# This file is part of the Biopython distribution and governed by your
# choice of the "Biopython License Agreement" or the "BSD 3-Clause License".
# Please see the LICENSE file that should have been included as part of this
# package.
"""Bio.Align support for output from <NAME>'s FASTA alignment tools.

This module contains a parser for output from the FASTA programs generated
with the '-m 8CB' or '-m 8CC' output formats.
"""
import re
import enum

import numpy

from Bio.Align import Alignment
from Bio.Align import interfaces
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord


class State(enum.Enum):
    """Enumerate alignment states needed when parsing a BTOP string."""

    MATCH = enum.auto()  # aligned letter pair (match or mismatch)
    QUERY_GAP = enum.auto()  # gap character on the query side
    TARGET_GAP = enum.auto()  # gap character on the target side
    NONE = enum.auto()  # initial state before any operation is seen


class AlignmentIterator(interfaces.AlignmentIterator):
    """FASTA output alignment iterator.

    For reading the (pairwise) alignments from the FASTA alignment programs
    using the '-m 8CB' or '-m 8CC' output formats.
    """

    def __init__(self, source):
        """Create an AlignmentIterator object.

        Arguments:
         - source - input data or file name
        """
        super().__init__(source, mode="t", fmt="FASTA")
        stream = self.stream
        try:
            line = next(stream)
        except StopIteration:
            raise ValueError("Empty file.") from None
        # The very first line records the command line used to run FASTA.
        assert line.startswith("# ")
        self.commandline = line[2:].strip()

    def parse(self, stream):
        """Parse the next alignment from the stream.

        Header ("# ...") blocks update the parser state (program, query,
        database, field layout); every other line is a tab-separated hit
        row, which is yielded as an Alignment.
        """
        if stream is None:
            raise StopIteration
        for line in stream:
            if line.startswith("# "):
                line = line.strip()
                # Final footer line: end of all queries.
                if line.startswith("# FASTA processed ") and line.endswith(" queries"):
                    return
                self._program = line[2:]
                line = next(stream)
                prefix = "# Query: "
                assert line.startswith(prefix)
                # Query line is "<id> [description] - <size> nt|aa".
                query_line, query_size = line[len(prefix) :].strip().rsplit(" - ", 1)
                query_size, unit = query_size.split()
                self._query_size = int(query_size)
                assert unit in ("nt", "aa")
                try:
                    self._query_id, self._query_description = query_line.split(None, 1)
                except ValueError:
                    # No whitespace: identifier only, no description.
                    self._query_id = query_line.strip()
                    self._query_description = None
                line = next(stream)
                prefix = "# Database: "
                assert line.startswith(prefix)
                self._database = line[len(prefix) :].strip()
                line = next(stream)
                prefix = "# Fields: "
                assert line.startswith(prefix)
                # The first twelve columns are fixed; column 13 selects the
                # alignment encoding (BTOP for -m 8CB, CIGAR for -m 8CC).
                fields = line[len(prefix) :].strip().split(", ")
                assert fields[0] == "query id"
                assert fields[1] == "subject id"
                assert fields[2] == "% identity"
                assert fields[3] == "alignment length"
                assert fields[4] == "mismatches"
                assert fields[5] == "gap opens"
                assert fields[6] == "q. start"
                assert fields[7] == "q. end"
                assert fields[8] == "s. start"
                assert fields[9] == "s. end"
                assert fields[10] == "evalue"
                assert fields[11] == "bit score"
                if fields[12] == "BTOP":
                    self._alignment_representation = "BTOP"
                elif fields[12] == "aln_code":
                    self._alignment_representation = "CIGAR"
                else:
                    raise ValueError("Unexpected field '%s'" % fields[12])
                line = next(stream)
                line = line.strip()
                assert line.startswith("# ")
                suffix = " hits found"
                assert line.endswith(suffix)
                # Parsed as a format sanity check only; the count itself is
                # not used afterwards.
                hits = int(line[2 : -len(suffix)])
            else:
                yield self.create_alignment(line)

    def create_alignment(self, line):
        """Parse one line of FASTA output and return an Alignment object."""
        columns = line.split()
        assert len(columns) == 13
        annotations = {}
        annotations["program"] = self._program
        annotations["database"] = self._database
        if self._query_id is not None:
            assert columns[0] == self._query_id
        query_id = columns[0]
        target_id = columns[1]
        percentage_identity = float(columns[2])
        alignment_length = int(columns[3])
        mismatches = int(columns[4])
        matches = alignment_length - mismatches
        # Cross-check the reported identity (2 decimals) against the
        # match/mismatch counts; tolerance covers rounding.
        difference = abs(100 * matches / alignment_length - percentage_identity)
        assert difference < 0.015
        gap_opens = int(columns[5])
        # Convert 1-based inclusive coordinates to 0-based half-open.
        query_start = int(columns[6]) - 1
        query_end = int(columns[7])
        target_start = int(columns[8]) - 1
        target_end = int(columns[9])
        annotations["mismatches"] = mismatches
        annotations["evalue"] = float(columns[10])
        annotations["bit_score"] = float(columns[11])
        if self._alignment_representation == "BTOP":
            coordinates = self.parse_btop(columns[12])
        elif self._alignment_representation == "CIGAR":
            coordinates = self.parse_cigar(columns[12])
        coordinates[0, :] += target_start
        if query_start < query_end:
            coordinates[1, :] += query_start
        else:
            # mapped to reverse strand
            coordinates[1, :] = query_start - coordinates[1, :] + 1
        query_size = self._query_size
        # Sequences are created without content (length only); only the
        # coordinates are known from this output format.
        query_sequence = Seq(None, length=query_size)
        query = SeqRecord(query_sequence, id=query_id)
        if self._query_description is not None:
            query.description = self._query_description
        target_sequence = Seq(None, length=target_end)
        target = SeqRecord(target_sequence, id=target_id)
        records = [target, query]
        alignment = Alignment(records, coordinates)
        alignment.annotations = annotations
        return alignment

    def parse_btop(self, btop):
        """Parse a BTOP string and return alignment coordinates.

        A BTOP (Blast trace-back operations) string is used by BLAST to
        describe a sequence alignment.
        """
        target_coordinates = []
        query_coordinates = []
        target_coordinates.append(0)
        query_coordinates.append(0)
        state = State.NONE
        tokens = re.findall("([A-Z-]{2}|\\d+)", btop)
        # each token is now
        # - an integer
        # - a pair of characters, which may include dashes
        for token in tokens:
            if token.startswith("-"):
                # Gap in the query; extend the current query-gap segment or
                # open a new coordinate pair.
                if state != State.QUERY_GAP:
                    target_coordinates.append(target_coordinates[-1])
                    query_coordinates.append(query_coordinates[-1])
                    state = State.QUERY_GAP
                target_coordinates[-1] += 1
            elif token.endswith("-"):
                # Gap in the target.
                if state != State.TARGET_GAP:
                    target_coordinates.append(target_coordinates[-1])
                    query_coordinates.append(query_coordinates[-1])
                    state = State.TARGET_GAP
                query_coordinates[-1] += 1
            else:
                try:
                    length = int(token)
                except ValueError:
                    # pair of mismatched letters
                    length = 1
                if state == State.MATCH:
                    target_coordinates[-1] += length
                    query_coordinates[-1] += length
                else:
                    target_coordinates.append(target_coordinates[-1] + length)
                    query_coordinates.append(query_coordinates[-1] + length)
                    state = State.MATCH
        coordinates = numpy.array([target_coordinates, query_coordinates])
        return coordinates

    def parse_cigar(self, cigar):
        """Parse a CIGAR string and return alignment coordinates.

        A CIGAR string, as defined by the SAM Sequence Alignment/Map format,
        describes a sequence alignment as a series of lengths and operation
        (alignment/insertion/deletion) codes.
        """
        target_coordinates = []
        query_coordinates = []
        target_coordinate = 0
        query_coordinate = 0
        target_coordinates.append(target_coordinate)
        query_coordinates.append(query_coordinate)
        state = State.NONE
        tokens = re.findall("(M|D|I|\\d+)", cigar)
        # each token is now
        # - the length of the operation
        # - the operation
        for length, operation in zip(tokens[::2], tokens[1::2]):
            length = int(length)
            if operation == "M":
                target_coordinate += length
                query_coordinate += length
            elif operation == "I":
                target_coordinate += length
            elif operation == "D":
                query_coordinate += length
            target_coordinates.append(target_coordinate)
            query_coordinates.append(query_coordinate)
        coordinates = numpy.array([target_coordinates, query_coordinates])
        return coordinates
4,350
965
// Map from int keys to CPoint values (MFC CMap; the repeated template
// arguments are the ARG_KEY / ARG_VALUE types used for parameter passing).
CMap<int, int, CPoint, CPoint> myMap;

// Add 10 elements to the map.
for (int i = 0; i < 10; i++)
	myMap.SetAt(i, CPoint(i, i));

// Remove every entry and verify the map reports itself empty.
myMap.RemoveAll();
ASSERT(myMap.IsEmpty());
77
343
// Copyright 2012 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "syzygy/experimental/pdb_dumper/pdb_module_info_stream_dumper.h"

#include "syzygy/common/align.h"
#include "syzygy/experimental/pdb_dumper/pdb_dump_util.h"
#include "syzygy/experimental/pdb_dumper/pdb_symbol_record_dumper.h"
#include "syzygy/pdb/pdb_dbi_stream.h"
#include "syzygy/pdb/pdb_stream.h"
#include "syzygy/pdb/pdb_stream_reader.h"
#include "syzygy/pdb/pdb_symbol_record.h"
#include "syzygy/pe/cvinfo_ext.h"

namespace pdb {

namespace cci = Microsoft_Cci_Pdb;

namespace {

// Read the file checksum substream from a module info stream. The filenames
// used by this module will be stored in a map.
// @param file_names The map containing the filenames listed in the name stream
//     of the PDB.
// @param reader The stream reader positioned at the checksum substream.
// @param length The length of the checksum substream.
// @param module_files The map where the filenames should be saved, keyed by
//     each checksum record's offset within the substream.
// @returns true on success, false on error.
bool ReadFileChecksums(const OffsetStringMap& file_names,
                       pdb::PdbStreamReaderWithPosition* reader,
                       size_t length,
                       OffsetStringMap* module_files) {
  DCHECK(reader != NULL);
  DCHECK(module_files != NULL);
  size_t base = reader->Position();
  size_t end = base + length;
  common::BinaryStreamParser parser(reader);
  while (reader->Position() < end) {
    cci::CV_FileCheckSum checksum = {};
    // Offset of this record relative to the substream start: this is the key
    // that DEBUG_S_LINES records use to refer back to a file.
    size_t pos = reader->Position() - base;
    if (!parser.Read(&checksum)) {
      LOG(ERROR) << "Unable to read file checksum.";
      return false;
    }
    OffsetStringMap::const_iterator it(file_names.find(checksum.name));
    if (it == file_names.end()) {
      LOG(ERROR) << "There is a checksum reference for a file that is not in "
                 << "the list of files used by this module.";
      return false;
    }
    module_files->insert(std::make_pair(pos, it->second));
    // Skip the checksum and align.
    if (!reader->Consume(checksum.len) || !parser.AlignTo(4)) {
      LOG(ERROR) << "Unable to seek past file checksum.";
      return false;
    }
  }
  return true;
}

// Dump the line information from a line information substream.
// @param file_names The map containing the filenames used by this module.
// @param out The output where the data should be dumped.
// @param reader The stream reader positioned at the line information.
// @param length The length of the line information substream.
// @param indent_level The indentation level to use.
// @returns true on success, false on error.
bool DumpLineInfo(const OffsetStringMap& file_names,
                  FILE* out,
                  pdb::PdbStreamReaderWithPosition* reader,
                  size_t length,
                  uint8_t indent_level) {
  DCHECK(reader != NULL);
  size_t base = reader->Position();
  // Read the header.
  common::BinaryStreamParser parser(reader);
  cci::CV_LineSection line_section = {};
  if (!parser.Read(&line_section)) {
    LOG(ERROR) << "Unable to read line section.";
    return false;
  }
  size_t end = base + length;
  while (reader->Position() < end) {
    cci::CV_SourceFile source_file = {};
    if (!parser.Read(&source_file)) {
      LOG(ERROR) << "Unable to read source info.";
      return false;
    }
    std::vector<cci::CV_Line> lines(source_file.count);
    if (lines.size() && !parser.ReadMultiple(lines.size(), &lines)) {
      LOG(ERROR) << "Unable to read line records.";
      return false;
    }
    // Column records are only present when CV_LINES_HAVE_COLUMNS is set;
    // otherwise the vector stays value-initialized (all zeros), which the
    // loop below treats as "no column information".
    std::vector<cci::CV_Column> columns(source_file.count);
    if ((line_section.flags & cci::CV_LINES_HAVE_COLUMNS) != 0 &&
        !parser.ReadMultiple(columns.size(), &columns)) {
      LOG(ERROR) << "Unable to read column records.";
      return false;
    }
    OffsetStringMap::const_iterator it(file_names.find(source_file.index));
    if (it == file_names.end()) {
      LOG(ERROR) << "Unable to find an index in the list of filenames used by "
                 << "this module.";
      return false;
    }
    DumpIndentedText(out, indent_level, "Section %d, offset 0x%04X.\n",
                     line_section.sec, line_section.off);
    for (size_t i = 0; i < lines.size(); ++i) {
      if (columns[i].offColumnStart != 0) {
        DumpIndentedText(out, indent_level,
                         "%s(%d, %d): line and column at %d:%04X.\n",
                         it->second.c_str(),
                         lines[i].flags & cci::linenumStart,
                         columns[i].offColumnStart,
                         line_section.sec,
                         line_section.off + lines[i].offset);
      } else {
        DumpIndentedText(out, indent_level,
                         "%s(%d): line at %d:%04X.\n",
                         it->second.c_str(),
                         lines[i].flags & cci::linenumStart,
                         line_section.sec,
                         line_section.off + lines[i].offset);
      }
    }
  }
  return true;
}

// Dump the line information substream from a module info stream.
// @param name_map The map containing the filenames listed in the name stream of
//     the PDB.
// @param out The output where the data should be dumped.
// @param stream The stream containing the line information.
// @param start The position where the line information start in the stream.
// @param lines_bytes The length of the line information substream.
// @param indent_level The level of indentation to use.
void DumpLines(const OffsetStringMap& name_map,
               FILE* out,
               pdb::PdbStream* stream,
               size_t start,
               size_t lines_bytes,
               uint8_t indent_level) {
  DCHECK(stream != NULL);

  if (lines_bytes == 0)
    return;

  // The line information is arranged as a back-to-back run of {type, len}
  // prefixed chunks. The types are DEBUG_S_FILECHKSMS and DEBUG_S_LINES.
  // The first of these provides file names and a file content checksum, where
  // each record is identified by its index into its chunk (excluding type
  // and len).
  pdb::PdbStreamReaderWithPosition reader(start, lines_bytes, stream);
  common::BinaryStreamParser parser(&reader);
  OffsetStringMap file_names;
  while (!reader.AtEnd()) {
    uint32_t line_info_type = 0;
    uint32_t length = 0;
    if (!parser.Read(&line_info_type) || !parser.Read(&length)) {
      LOG(ERROR) << "Unable to read line info signature.";
      return;
    }
    switch (line_info_type) {
      case cci::DEBUG_S_FILECHKSMS:
        if (!ReadFileChecksums(name_map, &reader, length, &file_names))
          return;
        break;
      case cci::DEBUG_S_LINES:
        if (!DumpLineInfo(file_names, out, &reader, length, indent_level))
          return;
        break;
      default:
        // Skip over for now.
        DumpIndentedText(out, indent_level, "Unsupported line info type.\n");
        DumpIndentedText(out, indent_level + 1, "Type: %d\n", line_info_type);
        DumpIndentedText(out, indent_level + 1, "Length: %d\n", length);
        if (!reader.Consume(length)) {
          LOG(ERROR) << "Failed to skip over unsupported line info type.";
          return;
        }
        break;
    }
  }
}

}  // namespace

// Dump a module info stream: module/object names, the symbol records, and
// the per-module line information.
void DumpModuleInfoStream(const DbiModuleInfo& module_info,
                          const OffsetStringMap& name_table,
                          FILE* out,
                          PdbStream* stream) {
  DCHECK(stream != NULL);

  uint8_t indent_level = 1;

  DumpIndentedText(out, indent_level, "Module name: %s\n",
                   module_info.module_name().c_str());
  DumpIndentedText(out, indent_level, "Object name: %s\n",
                   module_info.object_name().c_str());

  pdb::PdbStreamReaderWithPosition reader(stream);
  uint32_t type = 0;
  // The symbol substream must start with the C13 signature.
  if (!reader.Read(sizeof(type), &type) || type != cci::C13) {
    LOG(ERROR) << "Unexpected symbol stream type " << type << ".";
    return;
  }

  SymbolRecordVector symbols;
  const DbiModuleInfoBase& module_info_base = module_info.module_info_base();
  ReadSymbolRecord(stream, sizeof(type),
                   module_info_base.symbol_bytes - sizeof(type), &symbols);
  DumpIndentedText(out, indent_level + 1, "Symbol records:\n");
  DumpSymbolRecords(out, stream, symbols, indent_level + 2);

  // The line information follows the symbol substream.
  DumpIndentedText(out, indent_level + 1, "Lines:\n");
  DumpLines(name_table, out, stream,
            module_info.module_info_base().symbol_bytes,
            module_info.module_info_base().lines_bytes,
            indent_level + 2);
}

}  // namespace pdb
3,892
6,989
<gh_stars>1000+ # copied from python-2.7.3's traceback.py # CHANGES: # - some_str is replaced, trying to create unicode strings # from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import types from six import text_type def format_exception_only(etype, value): """Format the exception part of a traceback. The arguments are the exception type and value such as given by sys.last_type and sys.last_value. The return value is a list of strings, each ending in a newline. Normally, the list contains a single string; however, for SyntaxError exceptions, it contains several lines that (when printed) display detailed information about where the syntax error occurred. The message indicating which exception occurred is always the last string in the list. """ # An instance should not have a meaningful value parameter, but # sometimes does, particularly for string exceptions, such as # >>> raise string1, string2 # deprecated # # Clear these out first because issubtype(string1, SyntaxError) # would throw another exception and mask the original problem. if ( isinstance(etype, BaseException) or isinstance(etype, types.InstanceType) or etype is None or type(etype) is str ): return [_format_final_exc_line(etype, value)] stype = etype.__name__ if not issubclass(etype, SyntaxError): return [_format_final_exc_line(stype, value)] # It was a syntax error; show exactly where the problem was found. 
lines = [] try: msg, (filename, lineno, offset, badline) = value.args except Exception: pass else: filename = filename or "<string>" lines.append(' File "{}", line {}\n'.format(filename, lineno)) if badline is not None: if isinstance(badline, bytes): # python 2 only badline = badline.decode("utf-8", "replace") lines.append(" {}\n".format(badline.strip())) if offset is not None: caretspace = badline.rstrip("\n")[:offset].lstrip() # non-space whitespace (likes tabs) must be kept for alignment caretspace = ((c.isspace() and c or " ") for c in caretspace) # only three spaces to account for offset1 == pos 0 lines.append(" {}^\n".format("".join(caretspace))) value = msg lines.append(_format_final_exc_line(stype, value)) return lines def _format_final_exc_line(etype, value): """Return a list of a single line -- normal case for format_exception_only""" valuestr = _some_str(value) if value is None or not valuestr: line = "{}\n".format(etype) else: line = "{}: {}\n".format(etype, valuestr) return line def _some_str(value): try: return text_type(value) except Exception: try: return bytes(value).decode("UTF-8", "replace") except Exception: pass return "<unprintable {} object>".format(type(value).__name__)
1,202
542
from decimal import Decimal
from unittest import TestCase

from hummingbot.connector.exchange.huobi.huobi_in_flight_order import HuobiInFlightOrder
from hummingbot.core.data_type.common import OrderType, TradeType


class HuobiInFlightOrderTests(TestCase):
    """Unit tests for HuobiInFlightOrder JSON round-tripping and trade-update
    accounting (executed amounts, fees, duplicate-fill handling)."""

    def setUp(self):
        super().setUp()
        self.base_token = "BTC"
        self.quote_token = "USDT"
        self.trading_pair = f"{self.base_token}-{self.quote_token}"

    def test_creation_from_json(self):
        # An order restored with from_json must expose every persisted field
        # and serialize back to the identical dictionary.
        order_info = {
            "client_order_id": "OID1",
            "exchange_order_id": "EOID1",
            "trading_pair": self.trading_pair,
            "order_type": OrderType.LIMIT.name,
            "trade_type": TradeType.BUY.name,
            "price": "1000",
            "amount": "1",
            "creation_timestamp": 1640001112.0,
            "executed_amount_base": "0.5",
            "executed_amount_quote": "500",
            "fee_asset": "USDT",
            "fee_paid": "5",
            "last_state": "closed",
        }

        order = HuobiInFlightOrder.from_json(order_info)

        self.assertEqual(order_info["client_order_id"], order.client_order_id)
        self.assertEqual(order_info["exchange_order_id"], order.exchange_order_id)
        self.assertEqual(order_info["trading_pair"], order.trading_pair)
        self.assertEqual(OrderType.LIMIT, order.order_type)
        self.assertEqual(TradeType.BUY, order.trade_type)
        self.assertEqual(Decimal(order_info["price"]), order.price)
        self.assertEqual(Decimal(order_info["amount"]), order.amount)
        self.assertEqual(1640001112.0, order.creation_timestamp)
        self.assertEqual(order_info["last_state"], order.last_state)
        self.assertEqual(Decimal(order_info["executed_amount_base"]), order.executed_amount_base)
        self.assertEqual(Decimal(order_info["executed_amount_quote"]), order.executed_amount_quote)
        self.assertEqual(Decimal(order_info["fee_paid"]), order.fee_paid)
        self.assertEqual(order_info["fee_asset"], order.fee_asset)
        # Round trip: serialization must reproduce the input exactly.
        self.assertEqual(order_info, order.to_json())

    def test_update_with_partial_trade_event(self):
        order = HuobiInFlightOrder(
            client_order_id="OID1",
            exchange_order_id="99998888",
            trading_pair=self.trading_pair,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal(10000),
            amount=Decimal(1),
            creation_timestamp=1640001112.0
        )

        # Websocket trade event filling 0.1 of the 1.0 order.
        # NOTE(review): "feeDeduct " carries a trailing space as received from
        # the exchange payload — keep it verbatim.
        trade_event_info = {
            "eventType": "trade",
            "symbol": "btcusdt",
            "orderId": 99998888,
            "tradePrice": "10050.0",
            "tradeVolume": "0.1",
            "orderSide": "buy",
            "aggressor": True,
            "tradeId": 1,
            "tradeTime": 998787897878,
            "transactFee": "10.00",
            "feeDeduct ": "0",
            "feeDeductType": "",
            "feeCurrency": "usdt",
            "accountId": 9912791,
            "source": "spot-api",
            "orderPrice": "10000",
            "orderSize": "1",
            "clientOrderId": "OID1",
            "orderCreateTime": 998787897878,
            "orderStatus": "partial-filled"
        }

        update_result = order.update_with_trade_update(trade_event_info)

        self.assertTrue(update_result)
        self.assertTrue(order.is_open)
        self.assertEqual("partial-filled", order.last_state)
        self.assertEqual(Decimal(str(trade_event_info["tradeVolume"])), order.executed_amount_base)
        expected_executed_quote_amount = Decimal(str(trade_event_info["tradeVolume"])) * Decimal(
            str(trade_event_info["tradePrice"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["transactFee"]), order.fee_paid)
        self.assertEqual(trade_event_info["feeCurrency"].upper(), order.fee_asset)

    def test_update_with_full_fill_trade_event(self):
        order = HuobiInFlightOrder(
            client_order_id="OID1",
            exchange_order_id="99998888",
            trading_pair=self.trading_pair,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal(10000),
            amount=Decimal(1),
            creation_timestamp=1640001112.0
        )

        # First partial fill: 0.1 at 10050.
        trade_event_info = {
            "eventType": "trade",
            "symbol": "btcusdt",
            "orderId": 99998888,
            "tradePrice": "10050.0",
            "tradeVolume": "0.1",
            "orderSide": "buy",
            "aggressor": True,
            "tradeId": 1,
            "tradeTime": 998787897878,
            "transactFee": "10.00",
            "feeDeduct ": "0",
            "feeDeductType": "",
            "feeCurrency": "usdt",
            "accountId": 9912791,
            "source": "spot-api",
            "orderPrice": "10000",
            "orderSize": "1",
            "clientOrderId": "OID1",
            "orderCreateTime": 998787897878,
            "orderStatus": "partial-filled"
        }

        update_result = order.update_with_trade_update(trade_event_info)

        self.assertTrue(update_result)
        self.assertTrue(order.is_open)
        self.assertEqual("partial-filled", order.last_state)
        self.assertEqual(Decimal(str(trade_event_info["tradeVolume"])), order.executed_amount_base)
        expected_executed_quote_amount = Decimal(str(trade_event_info["tradeVolume"])) * Decimal(
            str(trade_event_info["tradePrice"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["transactFee"]), order.fee_paid)
        self.assertEqual(trade_event_info["feeCurrency"].upper(), order.fee_asset)

        # Second fill (tradeId 2) for the remaining 0.9: amounts and fees must
        # accumulate.  The order state itself only changes via status updates,
        # so it remains open/"partial-filled" here.
        complete_event_info = {
            "eventType": "trade",
            "symbol": "btcusdt",
            "orderId": 99998888,
            "tradePrice": "10060.0",
            "tradeVolume": "0.9",
            "orderSide": "buy",
            "aggressor": True,
            "tradeId": 2,
            "tradeTime": 998787897878,
            "transactFee": "50.00",
            "feeDeduct ": "0",
            "feeDeductType": "",
            "feeCurrency": "usdt",
            "accountId": 9912791,
            "source": "spot-api",
            "orderPrice": "10000",
            "orderSize": "1",
            "clientOrderId": "OID1",
            "orderCreateTime": 998787897878,
            "orderStatus": "partial-filled"
        }

        update_result = order.update_with_trade_update(complete_event_info)

        self.assertTrue(update_result)
        self.assertTrue(order.is_open)
        self.assertEqual("partial-filled", order.last_state)
        self.assertEqual(order.amount, order.executed_amount_base)
        expected_executed_quote_amount += Decimal(str(complete_event_info["tradeVolume"])) * Decimal(
            str(complete_event_info["tradePrice"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["transactFee"]) + Decimal(complete_event_info["transactFee"]),
                         order.fee_paid)
        self.assertEqual(trade_event_info["feeCurrency"].upper(), order.fee_asset)

    def test_update_with_repeated_trade_id_is_ignored(self):
        order = HuobiInFlightOrder(
            client_order_id="OID1",
            exchange_order_id="99998888",
            trading_pair=self.trading_pair,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal(10000),
            amount=Decimal(1),
            creation_timestamp=1640001112.0
        )

        trade_event_info = {
            "eventType": "trade",
            "symbol": "btcusdt",
            "orderId": 99998888,
            "tradePrice": "10050.0",
            "tradeVolume": "0.1",
            "orderSide": "buy",
            "aggressor": True,
            "tradeId": 1,
            "tradeTime": 998787897878,
            "transactFee": "10.00",
            "feeDeduct ": "0",
            "feeDeductType": "",
            "feeCurrency": "usdt",
            "accountId": 9912791,
            "source": "spot-api",
            "orderPrice": "10000",
            "orderSize": "1",
            "clientOrderId": "OID1",
            "orderCreateTime": 998787897878,
            "orderStatus": "partial-filled"
        }

        update_result = order.update_with_trade_update(trade_event_info)

        self.assertTrue(update_result)
        self.assertTrue(order.is_open)
        self.assertEqual("partial-filled", order.last_state)
        self.assertEqual(Decimal(str(trade_event_info["tradeVolume"])), order.executed_amount_base)
        expected_executed_quote_amount = Decimal(str(trade_event_info["tradeVolume"])) * Decimal(
            str(trade_event_info["tradePrice"]))
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["transactFee"]), order.fee_paid)
        self.assertEqual(trade_event_info["feeCurrency"].upper(), order.fee_asset)

        # Same tradeId (1) as the event above: the update must be rejected and
        # leave all accumulated amounts unchanged.
        complete_event_info = {
            "eventType": "trade",
            "symbol": "btcusdt",
            "orderId": 99998888,
            "tradePrice": "10060.0",
            "tradeVolume": "0.9",
            "orderSide": "buy",
            "aggressor": True,
            "tradeId": 1,
            "tradeTime": 998787897878,
            "transactFee": "50.00",
            "feeDeduct ": "0",
            "feeDeductType": "",
            "feeCurrency": "usdt",
            "accountId": 9912791,
            "source": "spot-api",
            "orderPrice": "10000",
            "orderSize": "1",
            "clientOrderId": "OID1",
            "orderCreateTime": 998787897878,
            "orderStatus": "partial-filled"
        }

        update_result = order.update_with_trade_update(complete_event_info)

        self.assertFalse(update_result)
        self.assertTrue(order.is_open)
        self.assertEqual("partial-filled", order.last_state)
        self.assertEqual(Decimal(str(trade_event_info["tradeVolume"])), order.executed_amount_base)
        self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote)
        self.assertEqual(Decimal(trade_event_info["transactFee"]), order.fee_paid)
        self.assertEqual(trade_event_info["feeCurrency"].upper(), order.fee_asset)
5,218
563
#include "useless_static_lib2.h"

// Silence MSVC warnings triggered by <iostream> (4668: undefined macro in
// #if; 4710/4711: inlining diagnostics).
#pragma warning(push)
#pragma warning(disable: 4668)
#pragma warning(disable: 4710)
#pragma warning(disable: 4711)
#include <iostream>
#pragma warning(pop)

namespace StaticLib2 {

// Prints a fixed diagnostic line to stdout.
void UselessMethod() {
  // '\n' instead of std::endl: identical output bytes without forcing a
  // stream flush (clang-tidy: performance-avoid-endl).
  std::cout << "- Useless in fact!\n";
}

}  // namespace StaticLib2
132
1,338
/*
 * Copyright 2010, <NAME>, <EMAIL>.
 * Distributed under the terms of the MIT License.
 */
#ifndef TITLE_PLACEHOLDER_MAPPER_H
#define TITLE_PLACEHOLDER_MAPPER_H


#include "ActiveProcessInfo.h"
#include "PatternEvaluator.h"
#include "ShellInfo.h"


/*! Class mapping the placeholders common for window and tab titles.
	Placeholders are resolved from the shell and active-process information
	captured at construction time.
*/
class TitlePlaceholderMapper : public PatternEvaluator::PlaceholderMapper {
public:
								TitlePlaceholderMapper(
									const ShellInfo& shellInfo,
									const ActiveProcessInfo& processInfo);

	// Resolves \a placeholder (with optional numeric argument \a number,
	// valid only when \a numberGiven) into \a _string; returns whether the
	// placeholder was recognized.
	virtual	bool				MapPlaceholder(char placeholder,
									int64 number, bool numberGiven,
									BString& _string);

private:
			ShellInfo			fShellInfo;
			ActiveProcessInfo	fProcessInfo;
};


/*! Adds the window-specific placeholders (window index, current tab title)
	on top of the common title placeholders.
*/
class WindowTitlePlaceholderMapper : public TitlePlaceholderMapper {
public:
								WindowTitlePlaceholderMapper(
									const ShellInfo& shellInfo,
									const ActiveProcessInfo& processInfo,
									int32 windowIndex, const BString& tabTitle);

	virtual	bool				MapPlaceholder(char placeholder,
									int64 number, bool numberGiven,
									BString& _string);

private:
			int32				fWindowIndex;
			BString				fTabTitle;
};


/*! Adds the tab-specific placeholders (tab index) on top of the common
	title placeholders.
*/
class TabTitlePlaceholderMapper : public TitlePlaceholderMapper {
public:
								TabTitlePlaceholderMapper(
									const ShellInfo& shellInfo,
									const ActiveProcessInfo& processInfo,
									int32 tabIndex);

	virtual	bool				MapPlaceholder(char placeholder,
									int64 number, bool numberGiven,
									BString& _string);

private:
			int32				fTabIndex;
};


#endif // TITLE_PLACEHOLDER_MAPPER_H
618
1,526
# coding=utf-8
import sys

# £
# (non-ASCII character above intentionally exercises the declared encoding)


class EvilObject(object):
    """
    Wild cackles! I have come to confuse perplex your importer with rainbows!
    """


# Replace this module object in sys.modules with a plain instance: a fixture
# that stress-tests import machinery which assumes modules stay module-typed.
sys.modules[__name__] = EvilObject()
78