repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
Kogser/bitcoin
src/qt/locale/bitcoin_ky.ts
9346
<TS language="ky" version="2.1"> <context> <name>AddressBookPage</name> <message> <source>Create a new address</source> <translation>Жаң даректи жасоо</translation> </message> <message> <source>&amp;Delete</source> <translation>Ө&amp;чүрүү</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <source>Address</source> <translation>Дарек</translation> </message> <message> <source>(no label)</source> <translation>(аты жок)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> </context> <context> <name>BanTableModel</name> </context> <context> <name>BitcoinGUI</name> <message> <source>&amp;Transactions</source> <translation>&amp;Транзакциялар</translation> </message> <message> <source>&amp;Verify message...</source> <translation>Билдирүүнү &amp;текшерүү...</translation> </message> <message> <source>Bitcoin</source> <translation>Bitcoin</translation> </message> <message> <source>Wallet</source> <translation>Капчык</translation> </message> <message> <source>&amp;File</source> <translation>&amp;Файл</translation> </message> <message> <source>&amp;Help</source> <translation>&amp;Жардам</translation> </message> <message> <source>Error</source> <translation>Ката</translation> </message> <message> <source>Warning</source> <translation>Эскертүү</translation> </message> <message> <source>Information</source> <translation>Маалымат</translation> </message> <message> <source>Up to date</source> <translation>Жаңыланган</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>(no label)</source> <translation>(аты жок)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <source>&amp;Address</source> <translation>&amp;Дарек</translation> </message> </context> <context> <name>FreespaceChecker</name> </context> <context> <name>HelpMessageDialog</name> <message> 
<source>version</source> <translation>версия</translation> </message> </context> <context> <name>Intro</name> <message> <source>Bitcoin</source> <translation>Bitcoin</translation> </message> <message> <source>Error</source> <translation>Ката</translation> </message> </context> <context> <name>ModalOverlay</name> </context> <context> <name>OpenURIDialog</name> </context> <context> <name>OptionsDialog</name> <message> <source>MB</source> <translation>МБ</translation> </message> <message> <source>&amp;Network</source> <translation>&amp;Тармак</translation> </message> <message> <source>W&amp;allet</source> <translation>Капчык</translation> </message> <message> <source>&amp;Port:</source> <translation>&amp;Порт:</translation> </message> <message> <source>&amp;Window</source> <translation>&amp;Терезе</translation> </message> <message> <source>&amp;OK</source> <translation>&amp;Жарайт</translation> </message> <message> <source>&amp;Cancel</source> <translation>&amp;Жокко чыгаруу</translation> </message> <message> <source>default</source> <translation>жарыяланбаган</translation> </message> <message> <source>none</source> <translation>жок</translation> </message> <message> <source>Error</source> <translation>Ката</translation> </message> </context> <context> <name>OverviewPage</name> </context> <context> <name>PaymentServer</name> </context> <context> <name>PeerTableModel</name> </context> <context> <name>QObject</name> </context> <context> <name>QObject::QObject</name> </context> <context> <name>QRImageWidget</name> </context> <context> <name>RPCConsole</name> <message> <source>&amp;Information</source> <translation>Маалымат</translation> </message> <message> <source>General</source> <translation>Жалпы</translation> </message> <message> <source>Network</source> <translation>&amp;Тармак</translation> </message> <message> <source>Name</source> <translation>Аты</translation> </message> <message> <source>&amp;Open</source> <translation>&amp;Ачуу</translation> </message> 
<message> <source>&amp;Console</source> <translation>&amp;Консоль</translation> </message> <message> <source>Clear console</source> <translation>Консолду тазалоо</translation> </message> </context> <context> <name>ReceiveCoinsDialog</name> <message> <source>&amp;Message:</source> <translation>Билдирүү:</translation> </message> </context> <context> <name>ReceiveRequestDialog</name> <message> <source>Address</source> <translation>Дарек</translation> </message> <message> <source>Message</source> <translation>Билдирүү</translation> </message> <message> <source>Wallet</source> <translation>Капчык</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>Message</source> <translation>Билдирүү</translation> </message> <message> <source>(no label)</source> <translation>(аты жок)</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <source>Clear &amp;All</source> <translation>&amp;Бардыгын тазалоо</translation> </message> <message> <source>S&amp;end</source> <translation>&amp;Жөнөтүү</translation> </message> <message> <source>(no label)</source> <translation>(аты жок)</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <source>Paste address from clipboard</source> <translation>Даректи алмашуу буферинен коюу</translation> </message> <message> <source>Message:</source> <translation>Билдирүү:</translation> </message> </context> <context> <name>SendConfirmationDialog</name> </context> <context> <name>ShutdownWindow</name> </context> <context> <name>SignVerifyMessageDialog</name> <message> <source>Paste address from clipboard</source> <translation>Даректи алмашуу буферинен коюу</translation> </message> <message> <source>Clear &amp;All</source> <translation>&amp;Бардыгын тазалоо</translation> </message> </context> <context> <name>SplashScreen</name> </context> <context> 
<name>TrafficGraphWidget</name> </context> <context> <name>TransactionDesc</name> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>Message</source> <translation>Билдирүү</translation> </message> </context> <context> <name>TransactionDescDialog</name> </context> <context> <name>TransactionTableModel</name> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>(no label)</source> <translation>(аты жок)</translation> </message> </context> <context> <name>TransactionView</name> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>Address</source> <translation>Дарек</translation> </message> </context> <context> <name>UnitDisplayStatusBarControl</name> </context> <context> <name>WalletFrame</name> </context> <context> <name>WalletModel</name> </context> <context> <name>WalletView</name> </context> <context> <name>bitcoin-core</name> <message> <source>Information</source> <translation>Маалымат</translation> </message> <message> <source>Warning</source> <translation>Эскертүү</translation> </message> <message> <source>Error</source> <translation>Ката</translation> </message> </context> </TS>
mit
cbingos/hongmafund
crispy_forms/templatetags/crispy_forms_filters.py
3494
# -*- coding: utf-8 -*- from django.conf import settings from django.forms import forms from django.forms.formsets import BaseFormSet from django.template import Context from django.template.loader import get_template from django.utils.functional import memoize from django.utils.safestring import mark_safe from django import template from crispy_forms.exceptions import CrispyError from crispy_forms.utils import flatatt TEMPLATE_PACK = getattr(settings, 'CRISPY_TEMPLATE_PACK', 'bootstrap') DEBUG = getattr(settings, 'DEBUG', False) def uni_formset_template(template_pack=TEMPLATE_PACK): return get_template('%s/uni_formset.html' % template_pack) uni_formset_template = memoize(uni_formset_template, {}, 1) def uni_form_template(template_pack=TEMPLATE_PACK): return get_template('%s/uni_form.html' % template_pack) uni_form_template = memoize(uni_form_template, {}, 1) register = template.Library() @register.filter(name='crispy') def as_crispy_form(form, template_pack=TEMPLATE_PACK, label_class="", field_class=""): """ The original and still very useful way to generate a div elegant form/formset:: {% load crispy_forms_tags %} <form class="uniForm" method="post"> {% csrf_token %} {{ myform|crispy }} </form> or, if you want to explicitly set the template pack:: {{ myform|crispy:"bootstrap" }} In ``bootstrap3`` for horizontal forms you can do:: {{ myform|label_class:"col-lg-2",field_class:"col-lg-8" }} """ if isinstance(form, BaseFormSet): template = uni_formset_template(template_pack) c = Context({ 'formset': form, 'form_show_errors': True, 'form_show_labels': True, 'label_class': label_class, 'field_class': field_class, }) else: template = uni_form_template(template_pack) c = Context({ 'form': form, 'form_show_errors': True, 'form_show_labels': True, 'label_class': label_class, 'field_class': field_class, }) return template.render(c) @register.filter(name='as_crispy_errors') def as_crispy_errors(form, template_pack=TEMPLATE_PACK): """ Renders only form errors the same way as 
django-crispy-forms:: {% load crispy_forms_tags %} {{ form|as_crispy_errors }} or:: {{ form|as_crispy_errors:"bootstrap" }} """ if isinstance(form, BaseFormSet): template = get_template('%s/errors_formset.html' % template_pack) c = Context({'formset': form}) else: template = get_template('%s/errors.html' % template_pack) c = Context({'form': form}) return template.render(c) @register.filter(name='as_crispy_field') def as_crispy_field(field, template_pack=TEMPLATE_PACK): """ Renders a form field like a django-crispy-forms field:: {% load crispy_forms_tags %} {{ form.field|as_crispy_field }} or:: {{ form.field|as_crispy_field:"bootstrap" }} """ if not isinstance(field, forms.BoundField) and DEBUG: raise CrispyError('|as_crispy_field got passed an invalid or inexistent field') template = get_template('%s/field.html' % template_pack) c = Context({'field': field, 'form_show_errors': True, 'form_show_labels': True}) return template.render(c) @register.filter(name='flatatt') def flatatt_filter(attrs): return mark_safe(flatatt(attrs))
mit
mheld/mongoid
lib/mongoid/extensions.rb
3476
# encoding: utf-8 require "mongoid/extensions/time_conversions" require "mongoid/extensions/array/accessors" require "mongoid/extensions/array/assimilation" require "mongoid/extensions/array/conversions" require "mongoid/extensions/array/parentization" require "mongoid/extensions/set/conversions" require "mongoid/extensions/big_decimal/conversions" require "mongoid/extensions/binary/conversions" require "mongoid/extensions/boolean/conversions" require "mongoid/extensions/date/conversions" require "mongoid/extensions/datetime/conversions" require "mongoid/extensions/false_class/equality" require "mongoid/extensions/float/conversions" require "mongoid/extensions/hash/accessors" require "mongoid/extensions/hash/assimilation" require "mongoid/extensions/hash/conversions" require "mongoid/extensions/hash/criteria_helpers" require "mongoid/extensions/hash/scoping" require "mongoid/extensions/integer/conversions" require "mongoid/extensions/nil/assimilation" require "mongoid/extensions/object/conversions" require "mongoid/extensions/proc/scoping" require "mongoid/extensions/string/conversions" require "mongoid/extensions/string/inflections" require "mongoid/extensions/symbol/inflections" require "mongoid/extensions/symbol/conversions" require "mongoid/extensions/true_class/equality" require "mongoid/extensions/object_id/conversions" class Array #:nodoc include Mongoid::Extensions::Array::Accessors include Mongoid::Extensions::Array::Assimilation include Mongoid::Extensions::Array::Conversions include Mongoid::Extensions::Array::Parentization end class Set #:nodoc include Mongoid::Extensions::Set::Conversions end class BigDecimal #:nodoc extend Mongoid::Extensions::BigDecimal::Conversions end class Binary #:nodoc extend Mongoid::Extensions::Binary::Conversions end class Boolean #:nodoc include Mongoid::Extensions::Boolean::Conversions end class DateTime #:nodoc extend Mongoid::Extensions::TimeConversions extend Mongoid::Extensions::DateTime::Conversions end class Date 
#:nodoc extend Mongoid::Extensions::TimeConversions extend Mongoid::Extensions::Date::Conversions end class FalseClass #:nodoc include Mongoid::Extensions::FalseClass::Equality end class Float #:nodoc extend Mongoid::Extensions::Float::Conversions end class Hash #:nodoc include Mongoid::Extensions::Hash::Accessors include Mongoid::Extensions::Hash::Assimilation include Mongoid::Extensions::Hash::CriteriaHelpers include Mongoid::Extensions::Hash::Scoping include Mongoid::Extensions::Hash::Conversions end class Integer #:nodoc extend Mongoid::Extensions::Integer::Conversions end class NilClass #:nodoc include Mongoid::Extensions::Nil::Assimilation end class Object #:nodoc: include Mongoid::Extensions::Object::Conversions end class Proc #:nodoc: include Mongoid::Extensions::Proc::Scoping end class String #:nodoc include Mongoid::Extensions::String::Inflections extend Mongoid::Extensions::String::Conversions end class Symbol #:nodoc remove_method :size if instance_methods.include? :size # temporal fix for ruby 1.9 include Mongoid::Extensions::Symbol::Inflections include Mongoid::Extensions::Symbol::Conversions end class Time #:nodoc extend Mongoid::Extensions::TimeConversions end class TrueClass #:nodoc include Mongoid::Extensions::TrueClass::Equality end class BSON::ObjectId #:nodoc extend Mongoid::Extensions::ObjectId::Conversions def as_json(options = nil) to_s end end
mit
mocsy/coreclr
tests/src/CoreMangLib/cti/system/collections/generic/dictionary/dictionaryidictionarycontains.cs
4278
using System; using System.Collections; using System.Collections.Generic; /// <summary> /// System.Collections.IDictionary.Contains(System.Object) /// </summary> public class DictionaryIDictionaryContains { #region Public Methods public bool RunTests() { bool retVal = true; TestLibrary.TestFramework.LogInformation("[Positive]"); retVal = PosTest1() && retVal; retVal = PosTest2() && retVal; // // TODO: Add your negative test cases here // // TestLibrary.TestFramework.LogInformation("[Negative]"); retVal = NegTest1() && retVal; return retVal; } #region Positive Test Cases public bool PosTest1() { bool retVal = true; // Add your scenario description here TestLibrary.TestFramework.BeginScenario("PosTest1: Verify method IDictionaryContains when specified key existed."); try { IDictionary dictionary = new Dictionary<string, string>(); dictionary.Add("txt", "notepad.exe"); dictionary.Add("bmp", "paint.exe"); dictionary.Add("dib", "paint.exe"); dictionary.Add("rtf", "wordpad.exe"); bool testVerify = dictionary.Contains("txt") && dictionary.Contains("bmp") && dictionary.Contains("dib") && dictionary.Contains("rtf"); if (testVerify == false) { TestLibrary.TestFramework.LogError("001.1", "Method IDictionaryContains Err ."); retVal = false; } } catch (Exception e) { TestLibrary.TestFramework.LogError("001.2", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } public bool PosTest2() { bool retVal = true; // Add your scenario description here TestLibrary.TestFramework.BeginScenario("PosTest2: Verify method IDictionaryContains when no specified key existed."); try { IDictionary dictionary = new Dictionary<string, string>(); if (dictionary.Contains("txt") == true) { TestLibrary.TestFramework.LogError("002.1", "Method IDictionaryContains Err ."); retVal = false; } } catch (Exception e) { TestLibrary.TestFramework.LogError("002.2", "Unexpected exception: " + e); 
TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } #endregion #region Nagetive Test Cases public bool NegTest1() { bool retVal = true; TestLibrary.TestFramework.BeginScenario("NegTest1: ArgumentNullException is not thrown."); try { IDictionary dictionary = new Dictionary<string, string>(); dictionary.Add("txt", "notepad.exe"); dictionary.Add("bmp", "paint.exe"); dictionary.Add("dib", "paint.exe"); dictionary.Add("rtf", "wordpad.exe"); string key = null; dictionary.Contains(key); TestLibrary.TestFramework.LogError("101.1", "ArgumentNullException is not thrown."); retVal = false; } catch (ArgumentNullException) { } catch (Exception e) { TestLibrary.TestFramework.LogError("101.2", "Unexpected exception: " + e); TestLibrary.TestFramework.LogInformation(e.StackTrace); retVal = false; } return retVal; } #endregion #endregion public static int Main() { DictionaryIDictionaryContains test = new DictionaryIDictionaryContains(); TestLibrary.TestFramework.BeginTestCase("DictionaryIDictionaryContains"); if (test.RunTests()) { TestLibrary.TestFramework.EndTestCase(); TestLibrary.TestFramework.LogInformation("PASS"); return 100; } else { TestLibrary.TestFramework.EndTestCase(); TestLibrary.TestFramework.LogInformation("FAIL"); return 0; } } }
mit
marksmeltzer/corefx
src/System.Security.Principal.Windows/src/System/Security/Principal/TokenAccessLevels.cs
1220
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. namespace System.Security.Principal { [Serializable] [Flags] public enum TokenAccessLevels { AssignPrimary = 0x00000001, Duplicate = 0x00000002, Impersonate = 0x00000004, Query = 0x00000008, QuerySource = 0x00000010, AdjustPrivileges = 0x00000020, AdjustGroups = 0x00000040, AdjustDefault = 0x00000080, AdjustSessionId = 0x00000100, Read = 0x00020000 | Query, Write = 0x00020000 | AdjustPrivileges | AdjustGroups | AdjustDefault, AllAccess = 0x000F0000 | AssignPrimary | Duplicate | Impersonate | Query | QuerySource | AdjustPrivileges | AdjustGroups | AdjustDefault | AdjustSessionId, MaximumAllowed = 0x02000000 } }
mit
zakkyzebra/Blog
public/ckeditor/build-config.js
2435
/** * @license Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved. * For licensing, see LICENSE.md or http://ckeditor.com/license */ /** * This file was added automatically by CKEditor builder. * You may re-use it at any time to build CKEditor again. * * If you would like to build CKEditor online again * (for example to upgrade), visit one the following links: * * (1) http://ckeditor.com/builder * Visit online builder to build CKEditor from scratch. * * (2) http://ckeditor.com/builder/3fbd7430c5ae23bca5707ed2ab706bdf * Visit online builder to build CKEditor, starting with the same setup as before. * * (3) http://ckeditor.com/builder/download/3fbd7430c5ae23bca5707ed2ab706bdf * Straight download link to the latest version of CKEditor (Optimized) with the same setup as before. * * NOTE: * This file is not used by CKEditor, you may remove it. * Changing this file will not change your CKEditor configuration. */ var CKBUILDER_CONFIG = { skin: 'moono', preset: 'basic', ignore: [ '.bender', 'bender.js', 'bender-err.log', 'bender-out.log', 'dev', '.DS_Store', '.editorconfig', '.gitattributes', '.gitignore', 'gruntfile.js', '.idea', '.jscsrc', '.jshintignore', '.jshintrc', 'less', '.mailmap', 'node_modules', 'package.json', 'README.md', 'tests' ], plugins : { 'about' : 1, 'basicstyles' : 1, 'clipboard' : 1, 'enterkey' : 1, 'entities' : 1, 'floatingspace' : 1, 'indentlist' : 1, 'link' : 1, 'list' : 1, 'toolbar' : 1, 'undo' : 1, 'wysiwygarea' : 1 }, languages : { 'af' : 1, 'ar' : 1, 'bg' : 1, 'bn' : 1, 'bs' : 1, 'ca' : 1, 'cs' : 1, 'cy' : 1, 'da' : 1, 'de' : 1, 'el' : 1, 'en' : 1, 'en-au' : 1, 'en-ca' : 1, 'en-gb' : 1, 'eo' : 1, 'es' : 1, 'et' : 1, 'eu' : 1, 'fa' : 1, 'fi' : 1, 'fo' : 1, 'fr' : 1, 'fr-ca' : 1, 'gl' : 1, 'gu' : 1, 'he' : 1, 'hi' : 1, 'hr' : 1, 'hu' : 1, 'id' : 1, 'is' : 1, 'it' : 1, 'ja' : 1, 'ka' : 1, 'km' : 1, 'ko' : 1, 'ku' : 1, 'lt' : 1, 'lv' : 1, 'mk' : 1, 'mn' : 1, 'ms' : 1, 'nb' : 1, 'nl' : 1, 'no' : 1, 'pl' : 1, 'pt' : 1, 'pt-br' 
: 1, 'ro' : 1, 'ru' : 1, 'si' : 1, 'sk' : 1, 'sl' : 1, 'sq' : 1, 'sr' : 1, 'sr-latn' : 1, 'sv' : 1, 'th' : 1, 'tr' : 1, 'tt' : 1, 'ug' : 1, 'uk' : 1, 'vi' : 1, 'zh' : 1, 'zh-cn' : 1 } };
mit
fredcollet/Sylius
src/Sylius/Bundle/UserBundle/spec/Form/Type/UserLoginTypeSpec.php
1197
<?php /* * This file is part of the Sylius package. * * (c) Paweł Jędrzejewski * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace spec\Sylius\Bundle\UserBundle\Form\Type; use PhpSpec\ObjectBehavior; use Prophecy\Argument; use Symfony\Component\Form\AbstractType; use Symfony\Component\Form\FormBuilderInterface; /** * @author Łukasz Chruściel <[email protected]> */ class UserLoginTypeSpec extends ObjectBehavior { function it_is_initializable() { $this->shouldHaveType('Sylius\Bundle\UserBundle\Form\Type\UserLoginType'); } function it_extends_abstract_type() { $this->shouldHaveType(AbstractType::class); } function it_has_name() { $this->getName()->shouldReturn('sylius_user_security_login'); } function it_builds_form(FormBuilderInterface $builder) { $builder->add('_username', 'text', Argument::any())->shouldBeCalled()->willReturn($builder); $builder->add('_password', 'password', Argument::any())->shouldBeCalled()->willReturn($builder); $this->buildForm($builder, []); } }
mit
avinashsivaraman/railscasts-episodes
episode-203/detour/test/functional/info_controller_test.rb
164
require 'test_helper' class InfoControllerTest < ActionController::TestCase # Replace this with your real tests. test "the truth" do assert true end end
mit
tsingfeng/ss-panel-v3-mod
app/Models/PasswordReset.php
148
<?php namespace App\Models; class PasswordReset extends Model { protected $connection = "default"; protected $table = 'ss_password_reset'; }
mit
SpoonLabs/astor
examples/Math-0c1ef/src/main/java/org/apache/commons/math3/optimization/univariate/BaseUnivariateOptimizer.java
4001
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math3.optimization.univariate; import org.apache.commons.math3.analysis.UnivariateFunction; import org.apache.commons.math3.optimization.BaseOptimizer; import org.apache.commons.math3.optimization.GoalType; /** * This interface is mainly intended to enforce the internal coherence of * Commons-Math. Users of the API are advised to base their code on * the following interfaces: * <ul> * <li>{@link org.apache.commons.math3.optimization.univariate.UnivariateOptimizer}</li> * </ul> * * @param <FUNC> Type of the objective function to be optimized. * * @deprecated As of 3.1 (to be removed in 4.0). * @since 3.0 */ @Deprecated public interface BaseUnivariateOptimizer<FUNC extends UnivariateFunction> extends BaseOptimizer<UnivariatePointValuePair> { /** * Find an optimum in the given interval. * * An optimizer may require that the interval brackets a single optimum. * * @param f Function to optimize. * @param goalType Type of optimization goal: either * {@link GoalType#MAXIMIZE} or {@link GoalType#MINIMIZE}. * @param min Lower bound for the interval. * @param max Upper bound for the interval. * @param maxEval Maximum number of function evaluations. 
* @return a (point, value) pair where the function is optimum. * @throws org.apache.commons.math3.exception.TooManyEvaluationsException * if the maximum evaluation count is exceeded. * @throws org.apache.commons.math3.exception.ConvergenceException * if the optimizer detects a convergence problem. * @throws IllegalArgumentException if {@code min > max} or the endpoints * do not satisfy the requirements specified by the optimizer. */ UnivariatePointValuePair optimize(int maxEval, FUNC f, GoalType goalType, double min, double max); /** * Find an optimum in the given interval, start at startValue. * An optimizer may require that the interval brackets a single optimum. * * @param f Function to optimize. * @param goalType Type of optimization goal: either * {@link GoalType#MAXIMIZE} or {@link GoalType#MINIMIZE}. * @param min Lower bound for the interval. * @param max Upper bound for the interval. * @param startValue Start value to use. * @param maxEval Maximum number of function evaluations. * @return a (point, value) pair where the function is optimum. * @throws org.apache.commons.math3.exception.TooManyEvaluationsException * if the maximum evaluation count is exceeded. * @throws org.apache.commons.math3.exception.ConvergenceException if the * optimizer detects a convergence problem. * @throws IllegalArgumentException if {@code min > max} or the endpoints * do not satisfy the requirements specified by the optimizer. * @throws org.apache.commons.math3.exception.NullArgumentException if any * argument is {@code null}. */ UnivariatePointValuePair optimize(int maxEval, FUNC f, GoalType goalType, double min, double max, double startValue); }
gpl-2.0
dantaylorseo/TailoredWebsite
clients/hpb/wp-content/plugins/woocommerce/includes/admin/meta-boxes/views/html-order-shipping.php
4825
<?php if ( ! defined( 'ABSPATH' ) ) { exit; // Exit if accessed directly } ?> <tr class="shipping <?php echo ( ! empty( $class ) ) ? $class : ''; ?>" data-order_item_id="<?php echo $item_id; ?>"> <td class="check-column"><input type="checkbox" /></td> <td class="thumb"><div></div></td> <td class="name"> <div class="view"> <?php echo ! empty( $item['name'] ) ? esc_html( $item['name'] ) : __( 'Shipping', 'woocommerce' ); ?> </div> <div class="edit" style="display: none;"> <input type="text" placeholder="<?php _e( 'Shipping Name', 'woocommerce' ); ?>" name="shipping_method_title[<?php echo $item_id; ?>]" value="<?php echo ( isset( $item['name'] ) ) ? esc_attr( $item['name'] ) : ''; ?>" /> <select name="shipping_method[<?php echo $item_id; ?>]"> <optgroup label="<?php _e( 'Shipping Method', 'woocommerce' ); ?>"> <option value=""><?php _e( 'N/A', 'woocommerce' ); ?></option> <?php $found_method = false; foreach ( $shipping_methods as $method ) { $method_id = isset( $item['method_id'] ) ? $item['method_id'] : ''; $current_method = ( 0 === strpos( $method_id, $method->id ) ) ? $method_id : $method->id; echo '<option value="' . esc_attr( $current_method ) . '" ' . selected( $method_id == $current_method, true, false ) . '>' . esc_html( $method->get_title() ) . '</option>'; if ( $method_id == $current_method ) { $found_method = true; } } if ( ! $found_method && ! empty( $method_id ) ) { echo '<option value="' . esc_attr( $method_id ) . '" selected="selected">' . __( 'Other', 'woocommerce' ) . '</option>'; } else { echo '<option value="other">' . __( 'Other', 'woocommerce' ) . '</option>'; } ?> </optgroup> </select> <input type="hidden" name="shipping_method_id[]" value="<?php echo esc_attr( $item_id ); ?>" /> </div> </td> <?php do_action( 'woocommerce_admin_order_item_values', null, $item, absint( $item_id ) ); ?> <td class="quantity" width="1%">&nbsp;</td> <td class="line_cost" width="1%"> <div class="view"> <?php echo ( isset( $item['cost'] ) ) ? 
wc_price( wc_round_tax_total( $item['cost'] ) ) : ''; if ( $refunded = $order->get_total_refunded_for_item( $item_id, 'shipping' ) ) { echo '<small class="refunded">-' . wc_price( $refunded ) . '</small>'; } ?> </div> <div class="edit" style="display: none;"> <input type="text" name="shipping_cost[<?php echo $item_id; ?>]" placeholder="<?php echo wc_format_localized_price( 0 ); ?>" value="<?php echo ( isset( $item['cost'] ) ) ? esc_attr( wc_format_localized_price( $item['cost'] ) ) : ''; ?>" class="line_total wc_input_price" /> </div> <div class="refund" style="display: none;"> <input type="text" name="refund_line_total[<?php echo absint( $item_id ); ?>]" placeholder="<?php echo wc_format_localized_price( 0 ); ?>" class="refund_line_total wc_input_price" /> </div> </td> <?php if ( isset( $legacy_order ) && ! $legacy_order && 'yes' == get_option( 'woocommerce_calc_taxes' ) ) : $shipping_taxes = isset( $item['taxes'] ) ? $item['taxes'] : ''; $tax_data = maybe_unserialize( $shipping_taxes ); foreach ( $order_taxes as $tax_item ) : $tax_item_id = $tax_item['rate_id']; $tax_item_total = isset( $tax_data[ $tax_item_id ] ) ? $tax_data[ $tax_item_id ] : ''; ?> <td class="line_tax" width="1%"> <div class="view"> <?php echo ( '' != $tax_item_total ) ? wc_price( wc_round_tax_total( $tax_item_total ) ) : '&ndash;'; if ( $refunded = $order->get_tax_refunded_for_item( $item_id, $tax_item_id, 'shipping' ) ) { echo '<small class="refunded">-' . wc_price( $refunded ) . '</small>'; } ?> </div> <div class="edit" style="display: none;"> <input type="text" name="shipping_taxes[<?php echo absint( $item_id ); ?>][<?php echo absint( $tax_item_id ); ?>]" placeholder="<?php echo wc_format_localized_price( 0 ); ?>" value="<?php echo ( isset( $tax_item_total ) ) ? 
esc_attr( wc_format_localized_price( $tax_item_total ) ) : ''; ?>" class="line_tax wc_input_price" /> </div> <div class="refund" style="display: none;"> <input type="text" name="refund_line_tax[<?php echo absint( $item_id ); ?>][<?php echo absint( $tax_item_id ); ?>]" placeholder="<?php echo wc_format_localized_price( 0 ); ?>" class="refund_line_tax wc_input_price" data-tax_id="<?php echo absint( $tax_item_id ); ?>" /> </div> </td> <?php endforeach; endif; ?> <td class="wc-order-edit-line-item"> <?php if ( $order->is_editable() ) : ?> <div class="wc-order-edit-line-item-actions"> <a class="edit-order-item" href="#"></a><a class="delete-order-item" href="#"></a> </div> <?php endif; ?> </td> </tr>
gpl-2.0
chemissi/P2
src/public/app/code/core/Mage/Core/Model/Mysql4/Store/Group/Collection.php
1219
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magentocommerce.com for more information. * * @category Mage * @package Mage_Core * @copyright Copyright (c) 2014 Magento Inc. (http://www.magentocommerce.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * Store group collection * * @category Mage * @package Mage_Core * @author Magento Core Team <[email protected]> */ class Mage_Core_Model_Mysql4_Store_Group_Collection extends Mage_Core_Model_Resource_Store_Group_Collection { }
gpl-2.0
tkxhoa/movie
wp-content/plugins/postman-smtp/Postman/Postman-Mail/Zend-1.12.10/Mail/Protocol/Imap.php
27920
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * @category Zend * @package Postman_Zend_Mail * @subpackage Protocol * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @version $Id$ */ /** * @category Zend * @package Postman_Zend_Mail * @subpackage Protocol * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ class Postman_Zend_Mail_Protocol_Imap { /** * Default timeout in seconds for initiating session */ const TIMEOUT_CONNECTION = 30; /** * socket to imap server * @var resource|null */ protected $_socket; /** * counter for request tag * @var int */ protected $_tagCount = 0; /** * Public constructor * * @param string $host hostname or IP address of IMAP server, if given connect() is called * @param int|null $port port of IMAP server, null for default (143 or 993 for ssl) * @param bool $ssl use ssl? 
'SSL', 'TLS' or false * @throws Postman_Zend_Mail_Protocol_Exception */ function __construct($host = '', $port = null, $ssl = false) { if ($host) { $this->connect($host, $port, $ssl); } } /** * Public destructor */ public function __destruct() { $this->logout(); } /** * Open connection to IMAP server * * @param string $host hostname or IP address of IMAP server * @param int|null $port of IMAP server, default is 143 (993 for ssl) * @param string|bool $ssl use 'SSL', 'TLS' or false * @return string welcome message * @throws Postman_Zend_Mail_Protocol_Exception */ public function connect($host, $port = null, $ssl = false) { if ($ssl == 'SSL') { $host = 'ssl://' . $host; } if ($port === null) { $port = $ssl === 'SSL' ? 993 : 143; } $errno = 0; $errstr = ''; $this->_socket = @fsockopen($host, $port, $errno, $errstr, self::TIMEOUT_CONNECTION); if (!$this->_socket) { /** * @see Postman_Zend_Mail_Protocol_Exception */ require_once 'Zend/Mail/Protocol/Exception.php'; throw new Postman_Zend_Mail_Protocol_Exception('cannot connect to host; error = ' . $errstr . ' (errno = ' . $errno . 
' )'); } if (!$this->_assumedNextLine('* OK')) { /** * @see Postman_Zend_Mail_Protocol_Exception */ require_once 'Zend/Mail/Protocol/Exception.php'; throw new Postman_Zend_Mail_Protocol_Exception('host doesn\'t allow connection'); } if ($ssl === 'TLS') { $result = $this->requestAndResponse('STARTTLS'); $result = $result && stream_socket_enable_crypto($this->_socket, true, STREAM_CRYPTO_METHOD_TLS_CLIENT); if (!$result) { /** * @see Postman_Zend_Mail_Protocol_Exception */ require_once 'Zend/Mail/Protocol/Exception.php'; throw new Postman_Zend_Mail_Protocol_Exception('cannot enable TLS'); } } } /** * get the next line from socket with error checking, but nothing else * * @return string next line * @throws Postman_Zend_Mail_Protocol_Exception */ protected function _nextLine() { $line = @fgets($this->_socket); if ($line === false) { /** * @see Postman_Zend_Mail_Protocol_Exception */ require_once 'Zend/Mail/Protocol/Exception.php'; throw new Postman_Zend_Mail_Protocol_Exception('cannot read - connection closed?'); } return $line; } /** * get next line and assume it starts with $start. some requests give a simple * feedback so we can quickly check if we can go on. * * @param string $start the first bytes we assume to be in the next line * @return bool line starts with $start * @throws Postman_Zend_Mail_Protocol_Exception */ protected function _assumedNextLine($start) { $line = $this->_nextLine(); return strpos($line, $start) === 0; } /** * get next line and split the tag. that's the normal case for a response line * * @param string $tag tag of line is returned by reference * @return string next line * @throws Postman_Zend_Mail_Protocol_Exception */ protected function _nextTaggedLine(&$tag) { $line = $this->_nextLine(); // seperate tag from line list($tag, $line) = explode(' ', $line, 2); return $line; } /** * split a given line in tokens. 
a token is literal of any form or a list * * @param string $line line to decode * @return array tokens, literals are returned as string, lists as array * @throws Postman_Zend_Mail_Protocol_Exception */ protected function _decodeLine($line) { $tokens = array(); $stack = array(); /* We start to decode the response here. The unterstood tokens are: literal "literal" or also "lit\\er\"al" {bytes}<NL>literal (literals*) All tokens are returned in an array. Literals in braces (the last unterstood token in the list) are returned as an array of tokens. I.e. the following response: "foo" baz {3}<NL>bar ("f\\\"oo" bar) would be returned as: array('foo', 'baz', 'bar', array('f\\\"oo', 'bar')); // TODO: add handling of '[' and ']' to parser for easier handling of response text */ // replace any trailling <NL> including spaces with a single space $line = rtrim($line) . ' '; while (($pos = strpos($line, ' ')) !== false) { $token = substr($line, 0, $pos); while ($token[0] == '(') { array_push($stack, $tokens); $tokens = array(); $token = substr($token, 1); } if ($token[0] == '"') { if (preg_match('%^\(*"((.|\\\\|\\")*?)" *%', $line, $matches)) { $tokens[] = $matches[1]; $line = substr($line, strlen($matches[0])); continue; } } if ($token[0] == '{') { $endPos = strpos($token, '}'); $chars = substr($token, 1, $endPos - 1); if (is_numeric($chars)) { $token = ''; while (strlen($token) < $chars) { $token .= $this->_nextLine(); } $line = ''; if (strlen($token) > $chars) { $line = substr($token, $chars); $token = substr($token, 0, $chars); } else { $line .= $this->_nextLine(); } $tokens[] = $token; $line = trim($line) . 
' '; continue; } } if ($stack && $token[strlen($token) - 1] == ')') { // closing braces are not seperated by spaces, so we need to count them $braces = strlen($token); $token = rtrim($token, ')'); // only count braces if more than one $braces -= strlen($token) + 1; // only add if token had more than just closing braces if (rtrim($token) != '') { $tokens[] = rtrim($token); } $token = $tokens; $tokens = array_pop($stack); // special handline if more than one closing brace while ($braces-- > 0) { $tokens[] = $token; $token = $tokens; $tokens = array_pop($stack); } } $tokens[] = $token; $line = substr($line, $pos + 1); } // maybe the server forgot to send some closing braces while ($stack) { $child = $tokens; $tokens = array_pop($stack); $tokens[] = $child; } return $tokens; } /** * read a response "line" (could also be more than one real line if response has {..}<NL>) * and do a simple decode * * @param array|string $tokens decoded tokens are returned by reference, if $dontParse * is true the unparsed line is returned here * @param string $wantedTag check for this tag for response code. Default '*' is * continuation tag. 
* @param bool $dontParse if true only the unparsed line is returned $tokens * @return bool if returned tag matches wanted tag * @throws Postman_Zend_Mail_Protocol_Exception */ public function readLine(&$tokens = array(), $wantedTag = '*', $dontParse = false) { $line = $this->_nextTaggedLine($tag); if (!$dontParse) { $tokens = $this->_decodeLine($line); } else { $tokens = $line; } // if tag is wanted tag we might be at the end of a multiline response return $tag == $wantedTag; } /** * read all lines of response until given tag is found (last line of response) * * @param string $tag the tag of your request * @param string|array $filter you can filter the response so you get only the * given response lines * @param bool $dontParse if true every line is returned unparsed instead of * the decoded tokens * @return null|bool|array tokens if success, false if error, null if bad request * @throws Postman_Zend_Mail_Protocol_Exception */ public function readResponse($tag, $dontParse = false) { $lines = array(); while (!$this->readLine($tokens, $tag, $dontParse)) { $lines[] = $tokens; } if ($dontParse) { // last to chars are still needed for response code $tokens = array(substr($tokens, 0, 2)); } // last line has response code if ($tokens[0] == 'OK') { return $lines ? $lines : true; } else if ($tokens[0] == 'NO'){ return false; } return null; } /** * send a request * * @param string $command your request command * @param array $tokens additional parameters to command, use escapeString() to prepare * @param string $tag provide a tag otherwise an autogenerated is returned * @return null * @throws Postman_Zend_Mail_Protocol_Exception */ public function sendRequest($command, $tokens = array(), &$tag = null) { if (!$tag) { ++$this->_tagCount; $tag = 'TAG' . $this->_tagCount; } $line = $tag . ' ' . $command; foreach ($tokens as $token) { if (is_array($token)) { if (@fputs($this->_socket, $line . ' ' . $token[0] . 
"\r\n") === false) { /** * @see Postman_Zend_Mail_Protocol_Exception */ require_once 'Zend/Mail/Protocol/Exception.php'; throw new Postman_Zend_Mail_Protocol_Exception('cannot write - connection closed?'); } if (!$this->_assumedNextLine('+ ')) { /** * @see Postman_Zend_Mail_Protocol_Exception */ require_once 'Zend/Mail/Protocol/Exception.php'; throw new Postman_Zend_Mail_Protocol_Exception('cannot send literal string'); } $line = $token[1]; } else { $line .= ' ' . $token; } } if (@fputs($this->_socket, $line . "\r\n") === false) { /** * @see Postman_Zend_Mail_Protocol_Exception */ require_once 'Zend/Mail/Protocol/Exception.php'; throw new Postman_Zend_Mail_Protocol_Exception('cannot write - connection closed?'); } } /** * send a request and get response at once * * @param string $command command as in sendRequest() * @param array $tokens parameters as in sendRequest() * @param bool $dontParse if true unparsed lines are returned instead of tokens * @return mixed response as in readResponse() * @throws Postman_Zend_Mail_Protocol_Exception */ public function requestAndResponse($command, $tokens = array(), $dontParse = false) { $this->sendRequest($command, $tokens, $tag); $response = $this->readResponse($tag, $dontParse); return $response; } /** * escape one or more literals i.e. for sendRequest * * @param string|array $string the literal/-s * @return string|array escape literals, literals with newline ar returned * as array('{size}', 'string'); */ public function escapeString($string) { if (func_num_args() < 2) { if (strpos($string, "\n") !== false) { return array('{' . strlen($string) . '}', $string); } else { return '"' . str_replace(array('\\', '"'), array('\\\\', '\\"'), $string) . 
'"'; } } $result = array(); foreach (func_get_args() as $string) { $result[] = $this->escapeString($string); } return $result; } /** * escape a list with literals or lists * * @param array $list list with literals or lists as PHP array * @return string escaped list for imap */ public function escapeList($list) { $result = array(); foreach ($list as $k => $v) { if (!is_array($v)) { // $result[] = $this->escapeString($v); $result[] = $v; continue; } $result[] = $this->escapeList($v); } return '(' . implode(' ', $result) . ')'; } /** * Login to IMAP server. * * @param string $user username * @param string $password password * @return bool success * @throws Postman_Zend_Mail_Protocol_Exception */ public function login($user, $password) { return $this->requestAndResponse('LOGIN', $this->escapeString($user, $password), true); } /** * logout of imap server * * @return bool success */ public function logout() { $result = false; if ($this->_socket) { try { $result = $this->requestAndResponse('LOGOUT', array(), true); } catch (Postman_Zend_Mail_Protocol_Exception $e) { // ignoring exception } fclose($this->_socket); $this->_socket = null; } return $result; } /** * Get capabilities from IMAP server * * @return array list of capabilities * @throws Postman_Zend_Mail_Protocol_Exception */ public function capability() { $response = $this->requestAndResponse('CAPABILITY'); if (!$response) { return $response; } $capabilities = array(); foreach ($response as $line) { $capabilities = array_merge($capabilities, $line); } return $capabilities; } /** * Examine and select have the same response. 
The common code for both * is in this method * * @param string $command can be 'EXAMINE' or 'SELECT' and this is used as command * @param string $box which folder to change to or examine * @return bool|array false if error, array with returned information * otherwise (flags, exists, recent, uidvalidity) * @throws Postman_Zend_Mail_Protocol_Exception */ public function examineOrSelect($command = 'EXAMINE', $box = 'INBOX') { $this->sendRequest($command, array($this->escapeString($box)), $tag); $result = array(); while (!$this->readLine($tokens, $tag)) { if ($tokens[0] == 'FLAGS') { array_shift($tokens); $result['flags'] = $tokens; continue; } switch ($tokens[1]) { case 'EXISTS': case 'RECENT': $result[strtolower($tokens[1])] = $tokens[0]; break; case '[UIDVALIDITY': $result['uidvalidity'] = (int)$tokens[2]; break; default: // ignore } } if ($tokens[0] != 'OK') { return false; } return $result; } /** * change folder * * @param string $box change to this folder * @return bool|array see examineOrselect() * @throws Postman_Zend_Mail_Protocol_Exception */ public function select($box = 'INBOX') { return $this->examineOrSelect('SELECT', $box); } /** * examine folder * * @param string $box examine this folder * @return bool|array see examineOrselect() * @throws Postman_Zend_Mail_Protocol_Exception */ public function examine($box = 'INBOX') { return $this->examineOrSelect('EXAMINE', $box); } /** * fetch one or more items of one or more messages * * @param string|array $items items to fetch from message(s) as string (if only one item) * or array of strings * @param int $from message for items or start message if $to !== null * @param int|null $to if null only one message ($from) is fetched, else it's the * last message, INF means last message avaible * @return string|array if only one item of one message is fetched it's returned as string * if items of one message are fetched it's returned as (name => value) * if one items of messages are fetched it's returned as (msgno => 
value) * if items of messages are fetchted it's returned as (msgno => (name => value)) * @throws Postman_Zend_Mail_Protocol_Exception */ public function fetch($items, $from, $to = null) { if (is_array($from)) { $set = implode(',', $from); } else if ($to === null) { $set = (int)$from; } else if ($to === INF) { $set = (int)$from . ':*'; } else { $set = (int)$from . ':' . (int)$to; } $items = (array)$items; $itemList = $this->escapeList($items); $this->sendRequest('FETCH', array($set, $itemList), $tag); $result = array(); while (!$this->readLine($tokens, $tag)) { // ignore other responses if ($tokens[1] != 'FETCH') { continue; } // ignore other messages if ($to === null && !is_array($from) && $tokens[0] != $from) { continue; } // if we only want one item we return that one directly if (count($items) == 1) { if ($tokens[2][0] == $items[0]) { $data = $tokens[2][1]; } else { // maybe the server send an other field we didn't wanted $count = count($tokens[2]); // we start with 2, because 0 was already checked for ($i = 2; $i < $count; $i += 2) { if ($tokens[2][$i] != $items[0]) { continue; } $data = $tokens[2][$i + 1]; break; } } } else { $data = array(); while (key($tokens[2]) !== null) { $data[current($tokens[2])] = next($tokens[2]); next($tokens[2]); } } // if we want only one message we can ignore everything else and just return if ($to === null && !is_array($from) && $tokens[0] == $from) { // we still need to read all lines while (!$this->readLine($tokens, $tag)); return $data; } $result[$tokens[0]] = $data; } if ($to === null && !is_array($from)) { /** * @see Postman_Zend_Mail_Protocol_Exception */ require_once 'Zend/Mail/Protocol/Exception.php'; throw new Postman_Zend_Mail_Protocol_Exception('the single id was not found in response'); } return $result; } /** * get mailbox list * * this method can't be named after the IMAP command 'LIST', as list is a reserved keyword * * @param string $reference mailbox reference for list * @param string $mailbox mailbox name match 
with wildcards * @return array mailboxes that matched $mailbox as array(globalName => array('delim' => .., 'flags' => ..)) * @throws Postman_Zend_Mail_Protocol_Exception */ public function listMailbox($reference = '', $mailbox = '*') { $result = array(); $list = $this->requestAndResponse('LIST', $this->escapeString($reference, $mailbox)); if (!$list || $list === true) { return $result; } foreach ($list as $item) { if (count($item) != 4 || $item[0] != 'LIST') { continue; } $result[$item[3]] = array('delim' => $item[2], 'flags' => $item[1]); } return $result; } /** * set flags * * @param array $flags flags to set, add or remove - see $mode * @param int $from message for items or start message if $to !== null * @param int|null $to if null only one message ($from) is fetched, else it's the * last message, INF means last message avaible * @param string|null $mode '+' to add flags, '-' to remove flags, everything else sets the flags as given * @param bool $silent if false the return values are the new flags for the wanted messages * @return bool|array new flags if $silent is false, else true or false depending on success * @throws Postman_Zend_Mail_Protocol_Exception */ public function store(array $flags, $from, $to = null, $mode = null, $silent = true) { $item = 'FLAGS'; if ($mode == '+' || $mode == '-') { $item = $mode . $item; } if ($silent) { $item .= '.SILENT'; } $flags = $this->escapeList($flags); $set = (int)$from; if ($to != null) { $set .= ':' . ($to == INF ? '*' : (int)$to); } $result = $this->requestAndResponse('STORE', array($set, $item, $flags), $silent); if ($silent) { return $result ? 
true : false; } $tokens = $result; $result = array(); foreach ($tokens as $token) { if ($token[1] != 'FETCH' || $token[2][0] != 'FLAGS') { continue; } $result[$token[0]] = $token[2][1]; } return $result; } /** * append a new message to given folder * * @param string $folder name of target folder * @param string $message full message content * @param array $flags flags for new message * @param string $date date for new message * @return bool success * @throws Postman_Zend_Mail_Protocol_Exception */ public function append($folder, $message, $flags = null, $date = null) { $tokens = array(); $tokens[] = $this->escapeString($folder); if ($flags !== null) { $tokens[] = $this->escapeList($flags); } if ($date !== null) { $tokens[] = $this->escapeString($date); } $tokens[] = $this->escapeString($message); return $this->requestAndResponse('APPEND', $tokens, true); } /** * copy message set from current folder to other folder * * @param string $folder destination folder * @param int|null $to if null only one message ($from) is fetched, else it's the * last message, INF means last message avaible * @return bool success * @throws Postman_Zend_Mail_Protocol_Exception */ public function copy($folder, $from, $to = null) { $set = (int)$from; if ($to != null) { $set .= ':' . ($to == INF ? 
'*' : (int)$to); } return $this->requestAndResponse('COPY', array($set, $this->escapeString($folder)), true); } /** * create a new folder (and parent folders if needed) * * @param string $folder folder name * @return bool success */ public function create($folder) { return $this->requestAndResponse('CREATE', array($this->escapeString($folder)), true); } /** * rename an existing folder * * @param string $old old name * @param string $new new name * @return bool success */ public function rename($old, $new) { return $this->requestAndResponse('RENAME', $this->escapeString($old, $new), true); } /** * remove a folder * * @param string $folder folder name * @return bool success */ public function delete($folder) { return $this->requestAndResponse('DELETE', array($this->escapeString($folder)), true); } /** * permanently remove messages * * @return bool success */ public function expunge() { // TODO: parse response? return $this->requestAndResponse('EXPUNGE'); } /** * send noop * * @return bool success */ public function noop() { // TODO: parse response return $this->requestAndResponse('NOOP'); } /** * do a search request * * This method is currently marked as internal as the API might change and is not * safe if you don't take precautions. * * @internal * @return array message ids */ public function search(array $params) { $response = $this->requestAndResponse('SEARCH', $params); if (!$response) { return $response; } foreach ($response as $ids) { if ($ids[0] == 'SEARCH') { array_shift($ids); return $ids; } } return array(); } }
gpl-2.0
PrasadG193/gcc_gimple_fe
libstdc++-v3/testsuite/27_io/basic_istream/extractors_arithmetic/wchar_t/02.cc
1256
// Copyright (C) 2004-2016 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option) // any later version. // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // 27.6.1.2.2 arithmetic extractors #include <istream> #include <sstream> #include <locale> #include <testsuite_hooks.h> // elaborated test for ints bool test02() { bool test __attribute__((unused)) = true; const std::wstring str_01(L"20000AB"); std::wstringbuf strb_01(str_01, std::ios_base::in); std::wistream is(&strb_01); int n = 15; is >> n; VERIFY( n == 20000 ); wchar_t c = is.peek(); VERIFY( c == L'A' ); return test; } int main() { test02(); return 0; }
gpl-2.0
pengshp/codelite
sdk/codelite_cppcheck/lib/checktype.cpp
11016
/* * Cppcheck - A tool for static C/C++ code analysis * Copyright (C) 2007-2015 Daniel Marjamäki and Cppcheck team. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ //--------------------------------------------------------------------------- #include "checktype.h" #include "mathlib.h" #include "symboldatabase.h" #include <stack> //--------------------------------------------------------------------------- // Register this check class (by creating a static instance of it) namespace { CheckType instance; } static bool astGetSizeSign(const Settings *settings, const Token *tok, unsigned int *size, char *sign) { if (!tok) return false; if (tok->isArithmeticalOp()) { if (!astGetSizeSign(settings, tok->astOperand1(), size, sign)) return false; return !tok->astOperand2() || astGetSizeSign(settings, tok->astOperand2(), size, sign); } if (tok->isNumber() && MathLib::isInt(tok->str())) { if (tok->str().find("L") != std::string::npos) return false; MathLib::bigint value = MathLib::toLongNumber(tok->str()); unsigned int sz; if (value >= -(1<<7) && value <= (1<<7)-1) sz = 8; else if (value >= -(1<<15) && value <= (1<<15)-1) sz = 16; else if (value >= -(1LL<<31) && value <= (1LL<<31)-1) sz = 32; else return false; if (sz < 8 * settings->sizeof_int) sz = 8 * settings->sizeof_int; if (*size < sz) *size = sz; if (tok->str().find('U') != std::string::npos) *sign = 'u'; if (*sign != 'u') *sign = 's'; return 
true; } if (tok->isName()) { const Variable *var = tok->variable(); if (!var) return false; unsigned int sz = 0; for (const Token *type = var->typeStartToken(); type; type = type->next()) { if (type->str() == "*") return false; // <- FIXME: handle pointers if (Token::Match(type, "char|short|int")) { sz = 8 * settings->sizeof_int; if (type->isUnsigned()) *sign = 'u'; else if (*sign != 'u') *sign = 's'; } else if (Token::Match(type, "float|double|long")) { return false; } else { // TODO: try to lookup type info in library } if (type == var->typeEndToken()) break; } if (sz == 0) return false; if (*size < sz) *size = sz; return true; } return false; } //--------------------------------------------------------------------------- // Checking for shift by too many bits //--------------------------------------------------------------------------- void CheckType::checkTooBigBitwiseShift() { // unknown sizeof(int) => can't run this checker if (_settings->platformType == Settings::Unspecified) return; const SymbolDatabase *symbolDatabase = _tokenizer->getSymbolDatabase(); const std::size_t functions = symbolDatabase->functionScopes.size(); for (std::size_t i = 0; i < functions; ++i) { const Scope * scope = symbolDatabase->functionScopes[i]; for (const Token* tok = scope->classStart->next(); tok != scope->classEnd; tok = tok->next()) { if (tok->str() != "<<" && tok->str() != ">>") continue; if (!tok->astOperand1() || !tok->astOperand2()) continue; // get number of bits of lhs const Variable *var = tok->astOperand1()->variable(); if (!var) continue; int lhsbits = 0; for (const Token *type = var->typeStartToken(); type; type = type->next()) { if (Token::Match(type,"char|short|int") && !type->isLong()) { lhsbits = _settings->sizeof_int * 8; break; } if (type == var->typeEndToken() || type->str() == "<") break; } if (lhsbits == 0) continue; // Get biggest rhs value. preferably a value which doesn't have 'condition'. 
const ValueFlow::Value *value = tok->astOperand2()->getValueGE(lhsbits, _settings); if (!value) continue; if (value->condition && !_settings->isEnabled("warning")) continue; if (value->inconclusive && !_settings->inconclusive) continue; tooBigBitwiseShiftError(tok, lhsbits, *value); } } } void CheckType::tooBigBitwiseShiftError(const Token *tok, int lhsbits, const ValueFlow::Value &rhsbits) { std::list<const Token*> callstack; callstack.push_back(tok); if (rhsbits.condition) callstack.push_back(rhsbits.condition); std::ostringstream errmsg; errmsg << "Shifting " << lhsbits << "-bit value by " << rhsbits.intvalue << " bits is undefined behaviour"; if (rhsbits.condition) errmsg << ". See condition at line " << rhsbits.condition->linenr() << "."; reportError(callstack, rhsbits.condition ? Severity::warning : Severity::error, "shiftTooManyBits", errmsg.str(), rhsbits.inconclusive); } //--------------------------------------------------------------------------- // Checking for integer overflow //--------------------------------------------------------------------------- void CheckType::checkIntegerOverflow() { // unknown sizeof(int) => can't run this checker if (_settings->platformType == Settings::Unspecified) return; // max int value according to platform settings. const MathLib::bigint maxint = (1LL << (8 * _settings->sizeof_int - 1)) - 1; const SymbolDatabase *symbolDatabase = _tokenizer->getSymbolDatabase(); const std::size_t functions = symbolDatabase->functionScopes.size(); for (std::size_t i = 0; i < functions; ++i) { const Scope * scope = symbolDatabase->functionScopes[i]; for (const Token* tok = scope->classStart->next(); tok != scope->classEnd; tok = tok->next()) { if (!tok->isArithmeticalOp()) continue; // is there a overflow result value const ValueFlow::Value *value = tok->getValueGE(maxint + 1, _settings); if (!value) value = tok->getValueLE(-maxint - 2, _settings); if (!value) continue; // get size and sign of result.. 
unsigned int size = 0; char sign = 0; if (!astGetSizeSign(_settings, tok, &size, &sign)) continue; if (sign != 's') // only signed integer overflow is UB continue; integerOverflowError(tok, *value); } } } void CheckType::integerOverflowError(const Token *tok, const ValueFlow::Value &value) { const std::string expr(tok ? tok->expressionString() : ""); const std::string cond(value.condition ? ". See condition at line " + MathLib::toString(value.condition->linenr()) + "." : ""); reportError(tok, value.condition ? Severity::warning : Severity::error, "integerOverflow", "Signed integer overflow for expression '"+expr+"'"+cond, value.inconclusive); } //--------------------------------------------------------------------------- // Checking for sign conversion when operand can be negative //--------------------------------------------------------------------------- void CheckType::checkSignConversion() { if (!_settings->isEnabled("warning")) return; const SymbolDatabase *symbolDatabase = _tokenizer->getSymbolDatabase(); const std::size_t functions = symbolDatabase->functionScopes.size(); for (std::size_t i = 0; i < functions; ++i) { const Scope * scope = symbolDatabase->functionScopes[i]; for (const Token* tok = scope->classStart->next(); tok != scope->classEnd; tok = tok->next()) { if (!tok->isArithmeticalOp() || Token::Match(tok,"+|-")) continue; unsigned int size = 0; char sign = 0; if (!astGetSizeSign(_settings, tok, &size, &sign)) continue; if (sign != 'u') continue; // Check if there are signed operands that can be negative.. 
std::stack<const Token *> tokens; tokens.push(tok->astOperand1()); tokens.push(tok->astOperand2()); while (!tokens.empty()) { const Token *tok1 = tokens.top(); tokens.pop(); if (!tok1) continue; if (tok1->str() == "(") continue; // Todo: properly handle casts, function calls, etc const Variable *var = tok1->variable(); if (var && tok1->getValueLE(-1,_settings)) { bool signedvar = true; // assume that variable is signed since it can have a negative value for (const Token *type = var->typeStartToken();; type = type->next()) { if (type->isUnsigned()) { signedvar = false; break; } if (type->isSigned()) break; if (type->isName() && !Token::Match(type, "char|short|int|long|const")) { signedvar = false; break; } if (type == var->typeEndToken()) break; } if (signedvar) { signConversionError(tok1); break; } } } } } } void CheckType::signConversionError(const Token *tok) { const std::string varname(tok ? tok->str() : "var"); reportError(tok, Severity::warning, "signConversion", "Suspicious code: sign conversion of " + varname + " in calculation, even though " + varname + " can have a negative value"); }
gpl-2.0
adrian17/gcc
libstdc++-v3/testsuite/ext/pb_ds/example/trie_dna.cc
3208
// -*- C++ -*- // Copyright (C) 2005-2016 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the terms // of the GNU General Public License as published by the Free Software // Foundation; either version 3, or (at your option) any later // version. // This library is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // General Public License for more details. // You should have received a copy of the GNU General Public License // along with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // Copyright (C) 2004 Ami Tavory and Vladimir Dreizin, IBM-HRL. // Permission to use, copy, modify, sell, and distribute this software // is hereby granted without fee, provided that the above copyright // notice appears in all copies, and that both that copyright notice // and this permission notice appear in supporting documentation. None // of the above authors, nor IBM Haifa Research Laboratories, make any // representation about the suitability of this software for any // purpose. It is provided "as is" without express or implied // warranty. /** * @file trie_dna_example.cpp * An example showing how to use a trie for storing DNA strings. */ /** * This example shows how to use a PATRICIA trie for storing DNA strings. The main point is writing element-access traits for these strings. */ #include <cassert> #include <iostream> #include <cstdlib> #include <ext/pb_ds/assoc_container.hpp> #include <ext/pb_ds/trie_policy.hpp> using namespace std; using namespace __gnu_pbds; // DNA is represented by a string. typedef string dna_t; // Following is an element access traits for a DNA string. 
struct dna_string_access_traits { public: typedef size_t size_type; typedef dna_t key_type; typedef const key_type& key_const_reference; typedef char e_type; typedef string::const_iterator const_iterator; enum { // Number of distinct elements. This is 4 = |{'A', 'C', 'G', 'T'}| max_size = 4 }; // Returns a const_iterator to the firstelement of r_key. inline static const_iterator begin(key_const_reference r_key) { return r_key.begin(); } // Returns a const_iterator to the after-lastelement of r_key. inline static const_iterator end(key_const_reference r_key) { return r_key.end(); } // Maps an element to a position. inline static size_t e_pos(e_type e) { switch(e) { case 'A': return 0; case 'C': return 1; case 'G': return 2; case 'T': return 3; default: std::abort(); }; } }; // A PATRICIA trie with DNA string element-access traits. typedef dna_string_access_traits traits_type; typedef trie<dna_t, string, traits_type> trie_type; int main() { trie_type t; // Now map some DNAs to diseases in namespace STD. t["ACCGGTTACTGGTA"] = "gonorrhea"; t["CCGTTATCGGTA"] = "syphlis"; // Check gonorrhea already contracted. assert(t.find("ACCGGTTACTGGTA") != t.end()); return 0; }
gpl-2.0
mnjkumar426/phptest
wp-content/plugins/wordpress-seo/admin/pages/metas.php
12276
<?php /** * @package Admin */ if ( ! defined( 'WPSEO_VERSION' ) ) { header( 'Status: 403 Forbidden' ); header( 'HTTP/1.1 403 Forbidden' ); exit(); } global $wpseo_admin_pages; $options = WPSEO_Options::get_all(); $wpseo_admin_pages->admin_header( true, WPSEO_Options::get_group_name( 'wpseo_titles' ), 'wpseo_titles' ); ?> <h2 class="nav-tab-wrapper" id="wpseo-tabs"> <a class="nav-tab" id="general-tab" href="#top#general"><?php _e( 'General', 'wordpress-seo' );?></a> <a class="nav-tab" id="home-tab" href="#top#home"><?php _e( 'Home', 'wordpress-seo' );?></a> <a class="nav-tab" id="post_types-tab" href="#top#post_types"><?php _e( 'Post Types', 'wordpress-seo' );?></a> <a class="nav-tab" id="taxonomies-tab" href="#top#taxonomies"><?php _e( 'Taxonomies', 'wordpress-seo' );?></a> <a class="nav-tab" id="archives-tab" href="#top#archives"><?php _e( 'Other', 'wordpress-seo' );?></a> </h2> <div class="tabwrapper"> <div id="general" class="wpseotab"> <?php echo '<h2>' . __( 'Title settings', 'wordpress-seo' ) . '</h2>'; echo $wpseo_admin_pages->checkbox( 'forcerewritetitle', __( 'Force rewrite titles', 'wordpress-seo' ) ); echo '<p class="desc">' . __( 'WordPress SEO has auto-detected whether it needs to force rewrite the titles for your pages, if you think it\'s wrong and you know what you\'re doing, you can change the setting here.', 'wordpress-seo' ) . '</p>'; echo '<h2>' . __( 'Title Separator', 'wordpress-seo' ) . '</h2>'; echo $wpseo_admin_pages->radio( 'separator', WPSEO_Option_Titles::get_instance()->get_separator_options(), '' ); echo '<p class="desc">' . __( 'Choose the symbol to use as your title separator. This will display, for instance, between your post title and site name.', 'wordpress-seo' ) . ' ' . __( 'Symbols are shown in the size they\'ll appear in in search results.', 'wordpress-seo' ) . '</p>'; echo '<h2>' . __( 'Sitewide <code>meta</code> settings', 'wordpress-seo' ) . 
'</h2>'; echo $wpseo_admin_pages->checkbox( 'noindex-subpages-wpseo', __( 'Noindex subpages of archives', 'wordpress-seo' ) ); echo '<p class="desc">' . __( 'If you want to prevent /page/2/ and further of any archive to show up in the search results, enable this.', 'wordpress-seo' ) . '</p>'; echo $wpseo_admin_pages->checkbox( 'usemetakeywords', __( 'Use <code>meta</code> keywords tag?', 'wordpress-seo' ) ); echo '<p class="desc">' . __( 'I don\'t know why you\'d want to use meta keywords, but if you want to, check this box.', 'wordpress-seo' ) . '</p>'; echo $wpseo_admin_pages->checkbox( 'noodp', __( 'Add <code>noodp</code> meta robots tag sitewide', 'wordpress-seo' ) ); echo '<p class="desc">' . __( 'Prevents search engines from using the DMOZ description for pages from this site in the search results.', 'wordpress-seo' ) . '</p>'; echo $wpseo_admin_pages->checkbox( 'noydir', __( 'Add <code>noydir</code> meta robots tag sitewide', 'wordpress-seo' ) ); echo '<p class="desc">' . __( 'Prevents search engines from using the Yahoo! directory description for pages from this site in the search results.', 'wordpress-seo' ) . '</p>'; echo '<h2>' . __( 'Clean up the <code>&lt;head&gt;</code>', 'wordpress-seo' ) . '</h2>'; echo $wpseo_admin_pages->checkbox( 'hide-rsdlink', __( 'Hide RSD Links', 'wordpress-seo' ) ); echo $wpseo_admin_pages->checkbox( 'hide-wlwmanifest', __( 'Hide WLW Manifest Links', 'wordpress-seo' ) ); echo $wpseo_admin_pages->checkbox( 'hide-shortlink', __( 'Hide Shortlink for posts', 'wordpress-seo' ) ); echo $wpseo_admin_pages->checkbox( 'hide-feedlinks', __( 'Hide RSS Links', 'wordpress-seo' ) ); ?> </div> <div id="home" class="wpseotab"> <?php if ( 'posts' == get_option( 'show_on_front' ) ) { echo '<h2>' . __( 'Homepage', 'wordpress-seo' ) . 
'</h2>'; echo $wpseo_admin_pages->textinput( 'title-home-wpseo', __( 'Title template', 'wordpress-seo' ) ); echo $wpseo_admin_pages->textarea( 'metadesc-home-wpseo', __( 'Meta description template', 'wordpress-seo' ), '', 'metadesc' ); if ( $options['usemetakeywords'] === true ) { echo $wpseo_admin_pages->textinput( 'metakey-home-wpseo', __( 'Meta keywords template', 'wordpress-seo' ) ); } } else { echo '<h2>' . __( 'Homepage &amp; Front page', 'wordpress-seo' ) . '</h2>'; echo '<p>' . sprintf( __( 'You can determine the title and description for the front page by %sediting the front page itself &raquo;%s', 'wordpress-seo' ), '<a href="' . esc_url( get_edit_post_link( get_option( 'page_on_front' ) ) ) . '">', '</a>' ) . '</p>'; if ( get_option( 'page_for_posts' ) > 0 ) { echo '<p>' . sprintf( __( 'You can determine the title and description for the blog page by %sediting the blog page itself &raquo;%s', 'wordpress-seo' ), '<a href="' . esc_url( get_edit_post_link( get_option( 'page_for_posts' ) ) ) . '">', '</a>' ) . '</p>'; } } ?> </div> <div id="post_types" class="wpseotab"> <?php $post_types = get_post_types( array( 'public' => true ), 'objects' ); if ( is_array( $post_types ) && $post_types !== array() ) { foreach ( $post_types as $pt ) { $warn = false; if ( $options['redirectattachment'] === true && $pt->name == 'attachment' ) { echo '<div class="wpseo-warning">'; $warn = true; } $name = $pt->name; echo '<h4 id="' . esc_attr( $name ) . '">' . esc_html( ucfirst( $pt->labels->name ) ) . '</h4>'; if ( $warn === true ) { echo '<h4 class="error-message">' . __( 'Take note:', 'wordpress-seo' ) . '</h4>'; echo '<p class="error-message">' . __( 'As you are redirecting attachment URLs to parent post URLs, these settings will currently only have an effect on <strong>unattached</strong> media items!', 'wordpress-seo' ) . '</p>'; echo '<p class="error-message">' . 
sprintf( __( 'So remember: If you change the %sattachment redirection setting%s in the future, the below settings will take effect for *all* media items.', 'wordpress-seo' ), '<a href="' . esc_url( admin_url( 'admin.php?page=wpseo_permalinks' ) ) . '">', '</a>' ) . '</p>'; } echo $wpseo_admin_pages->textinput( 'title-' . $name, __( 'Title template', 'wordpress-seo' ) ); echo $wpseo_admin_pages->textarea( 'metadesc-' . $name, __( 'Meta description template', 'wordpress-seo' ), '', 'metadesc' ); if ( $options['usemetakeywords'] === true ) { echo $wpseo_admin_pages->textinput( 'metakey-' . $name, __( 'Meta keywords template', 'wordpress-seo' ) ); } echo $wpseo_admin_pages->checkbox( 'noindex-' . $name, '<code>noindex, follow</code>', __( 'Meta Robots', 'wordpress-seo' ) ); echo $wpseo_admin_pages->checkbox( 'showdate-' . $name, __( 'Show date in snippet preview?', 'wordpress-seo' ), __( 'Date in Snippet Preview', 'wordpress-seo' ) ); echo $wpseo_admin_pages->checkbox( 'hideeditbox-' . $name, __( 'Hide', 'wordpress-seo' ), __( 'WordPress SEO Meta Box', 'wordpress-seo' ) ); /** * Allow adding a custom checkboxes to the admin meta page - Post Types tab * @api WPSEO_Admin_Pages $wpseo_admin_pages The WPSEO_Admin_Pages object * @api String $name The post type name */ do_action( 'wpseo_admin_page_meta_post_types', $wpseo_admin_pages, $name ); echo '<br/>'; if ( $warn === true ) { echo '</div>'; } unset( $warn ); } unset( $pt ); } unset( $post_types ); $post_types = get_post_types( array( '_builtin' => false, 'has_archive' => true ), 'objects' ); if ( is_array( $post_types ) && $post_types !== array() ) { echo '<h2>' . __( 'Custom Post Type Archives', 'wordpress-seo' ) . '</h2>'; echo '<p>' . __( 'Note: instead of templates these are the actual titles and meta descriptions for these custom post type archive pages.', 'wordpress-seo' ) . '</p>'; foreach ( $post_types as $pt ) { $name = $pt->name; echo '<h4>' . esc_html( ucfirst( $pt->labels->name ) ) . 
'</h4>'; echo $wpseo_admin_pages->textinput( 'title-ptarchive-' . $name, __( 'Title', 'wordpress-seo' ) ); echo $wpseo_admin_pages->textarea( 'metadesc-ptarchive-' . $name, __( 'Meta description', 'wordpress-seo' ), '', 'metadesc' ); if ( $options['usemetakeywords'] === true ) { echo $wpseo_admin_pages->textinput( 'metakey-ptarchive-' . $name, __( 'Meta keywords', 'wordpress-seo' ) ); } if ( $options['breadcrumbs-enable'] === true ) { echo $wpseo_admin_pages->textinput( 'bctitle-ptarchive-' . $name, __( 'Breadcrumbs title', 'wordpress-seo' ) ); } echo $wpseo_admin_pages->checkbox( 'noindex-ptarchive-' . $name, '<code>noindex, follow</code>', __( 'Meta Robots', 'wordpress-seo' ) ); } unset( $pt ); } unset( $post_types ); ?> </div> <div id="taxonomies" class="wpseotab"> <?php $taxonomies = get_taxonomies( array( 'public' => true ), 'objects' ); if ( is_array( $taxonomies ) && $taxonomies !== array() ) { foreach ( $taxonomies as $tax ) { echo '<h4>' . esc_html( ucfirst( $tax->labels->name ) ). '</h4>'; echo $wpseo_admin_pages->textinput( 'title-tax-' . $tax->name, __( 'Title template', 'wordpress-seo' ) ); echo $wpseo_admin_pages->textarea( 'metadesc-tax-' . $tax->name, __( 'Meta description template', 'wordpress-seo' ), '', 'metadesc' ); if ( $options['usemetakeywords'] === true ) { echo $wpseo_admin_pages->textinput( 'metakey-tax-' . $tax->name, __( 'Meta keywords template', 'wordpress-seo' ) ); } echo $wpseo_admin_pages->checkbox( 'noindex-tax-' . $tax->name, '<code>noindex, follow</code>', __( 'Meta Robots', 'wordpress-seo' ) ); echo $wpseo_admin_pages->checkbox( 'hideeditbox-tax-' . $tax->name, __( 'Hide', 'wordpress-seo' ), __( 'WordPress SEO Meta Box', 'wordpress-seo' ) ); echo '<br/>'; } unset( $tax ); } unset( $taxonomies ); ?> </div> <div id="archives" class="wpseotab"> <?php echo '<h4>' . __( 'Author Archives', 'wordpress-seo' ) . 
'</h4>'; echo $wpseo_admin_pages->textinput( 'title-author-wpseo', __( 'Title template', 'wordpress-seo' ) ); echo $wpseo_admin_pages->textarea( 'metadesc-author-wpseo', __( 'Meta description template', 'wordpress-seo' ), '', 'metadesc' ); if ( $options['usemetakeywords'] === true ) { echo $wpseo_admin_pages->textinput( 'metakey-author-wpseo', __( 'Meta keywords template', 'wordpress-seo' ) ); } echo $wpseo_admin_pages->checkbox( 'noindex-author-wpseo', '<code>noindex, follow</code>', __( 'Meta Robots', 'wordpress-seo' ) ); echo $wpseo_admin_pages->checkbox( 'disable-author', __( 'Disable the author archives', 'wordpress-seo' ), '' ); echo '<p class="desc label">' . __( 'If you\'re running a one author blog, the author archive will always look exactly the same as your homepage. And even though you may not link to it, others might, to do you harm. Disabling them here will make sure any link to those archives will be 301 redirected to the homepage.', 'wordpress-seo' ) . '</p>'; echo '<br/>'; echo '<h4>' . __( 'Date Archives', 'wordpress-seo' ) . '</h4>'; echo $wpseo_admin_pages->textinput( 'title-archive-wpseo', __( 'Title template', 'wordpress-seo' ) ); echo $wpseo_admin_pages->textarea( 'metadesc-archive-wpseo', __( 'Meta description template', 'wordpress-seo' ), '', 'metadesc' ); echo '<br/>'; echo $wpseo_admin_pages->checkbox( 'noindex-archive-wpseo', '<code>noindex, follow</code>', __( 'Meta Robots', 'wordpress-seo' ) ); echo $wpseo_admin_pages->checkbox( 'disable-date', __( 'Disable the date-based archives', 'wordpress-seo' ), '' ); echo '<p class="desc label">' . __( 'For the date based archives, the same applies: they probably look a lot like your homepage, and could thus be seen as duplicate content.', 'wordpress-seo' ) . '</p>'; echo '<h2>' . __( 'Special Pages', 'wordpress-seo' ) . '</h2>'; echo '<p>' . __( 'These pages will be noindex, followed by default, so they will never show up in search results.', 'wordpress-seo' ) . '</p>'; echo '<h4>' . 
__( 'Search pages', 'wordpress-seo' ) . '</h4>'; echo $wpseo_admin_pages->textinput( 'title-search-wpseo', __( 'Title template', 'wordpress-seo' ) ); echo '<h4>' . __( '404 pages', 'wordpress-seo' ) . '</h4>'; echo $wpseo_admin_pages->textinput( 'title-404-wpseo', __( 'Title template', 'wordpress-seo' ) ); echo '<br class="clear"/>'; ?> </div> <div id="template_help" class="wpseotab"> <?php echo '<h2>' . __( 'Variables', 'wordpress-seo' ) . '</h2>'; echo '</div>'; echo '</div>'; $wpseo_admin_pages->admin_footer();
gpl-2.0
leaubout/P2
src/public/app/code/core/Mage/Sales/Model/Quote/Payment.php
7164
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magentocommerce.com for more information. * * @category Mage * @package Mage_Sales * @copyright Copyright (c) 2014 Magento Inc. (http://www.magentocommerce.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * Quote payment information * * @method Mage_Sales_Model_Resource_Quote_Payment _getResource() * @method Mage_Sales_Model_Resource_Quote_Payment getResource() * @method int getQuoteId() * @method Mage_Sales_Model_Quote_Payment setQuoteId(int $value) * @method string getCreatedAt() * @method Mage_Sales_Model_Quote_Payment setCreatedAt(string $value) * @method string getUpdatedAt() * @method Mage_Sales_Model_Quote_Payment setUpdatedAt(string $value) * @method string getMethod() * @method Mage_Sales_Model_Quote_Payment setMethod(string $value) * @method string getCcType() * @method Mage_Sales_Model_Quote_Payment setCcType(string $value) * @method string getCcNumberEnc() * @method Mage_Sales_Model_Quote_Payment setCcNumberEnc(string $value) * @method string getCcLast4() * @method Mage_Sales_Model_Quote_Payment setCcLast4(string $value) * @method string getCcCidEnc() * @method Mage_Sales_Model_Quote_Payment setCcCidEnc(string $value) * @method string getCcOwner() * @method Mage_Sales_Model_Quote_Payment setCcOwner(string $value) * @method int getCcExpMonth() * @method 
Mage_Sales_Model_Quote_Payment setCcExpMonth(int $value) * @method int getCcExpYear() * @method Mage_Sales_Model_Quote_Payment setCcExpYear(int $value) * @method string getCcSsOwner() * @method Mage_Sales_Model_Quote_Payment setCcSsOwner(string $value) * @method int getCcSsStartMonth() * @method Mage_Sales_Model_Quote_Payment setCcSsStartMonth(int $value) * @method int getCcSsStartYear() * @method Mage_Sales_Model_Quote_Payment setCcSsStartYear(int $value) * @method string getCybersourceToken() * @method Mage_Sales_Model_Quote_Payment setCybersourceToken(string $value) * @method string getPaypalCorrelationId() * @method Mage_Sales_Model_Quote_Payment setPaypalCorrelationId(string $value) * @method string getPaypalPayerId() * @method Mage_Sales_Model_Quote_Payment setPaypalPayerId(string $value) * @method string getPaypalPayerStatus() * @method Mage_Sales_Model_Quote_Payment setPaypalPayerStatus(string $value) * @method string getPoNumber() * @method Mage_Sales_Model_Quote_Payment setPoNumber(string $value) * @method string getAdditionalData() * @method Mage_Sales_Model_Quote_Payment setAdditionalData(string $value) * @method string getCcSsIssue() * @method Mage_Sales_Model_Quote_Payment setCcSsIssue(string $value) * @method string getIdealIssuerId() * @method Mage_Sales_Model_Quote_Payment setIdealIssuerId(string $value) * @method string getIdealIssuerList() * @method Mage_Sales_Model_Quote_Payment setIdealIssuerList(string $value) * * @category Mage * @package Mage_Sales * @author Magento Core Team <[email protected]> */ class Mage_Sales_Model_Quote_Payment extends Mage_Payment_Model_Info { protected $_eventPrefix = 'sales_quote_payment'; protected $_eventObject = 'payment'; protected $_quote; /** * Initialize resource model */ protected function _construct() { $this->_init('sales/quote_payment'); } /** * Declare quote model instance * * @param Mage_Sales_Model_Quote $quote * @return Mage_Sales_Model_Quote_Payment */ public function setQuote(Mage_Sales_Model_Quote 
$quote) { $this->_quote = $quote; $this->setQuoteId($quote->getId()); return $this; } /** * Retrieve quote model instance * * @return Mage_Sales_Model_Quote */ public function getQuote() { return $this->_quote; } /** * Import data array to payment method object, * Method calls quote totals collect because payment method availability * can be related to quote totals * * @param array $data * @throws Mage_Core_Exception * @return Mage_Sales_Model_Quote_Payment */ public function importData(array $data) { $data = new Varien_Object($data); Mage::dispatchEvent( $this->_eventPrefix . '_import_data_before', array( $this->_eventObject=>$this, 'input'=>$data, ) ); $this->setMethod($data->getMethod()); $method = $this->getMethodInstance(); /** * Payment availability related with quote totals. * We have to recollect quote totals before checking */ $this->getQuote()->collectTotals(); if (!$method->isAvailable($this->getQuote()) || !$method->isApplicableToQuote($this->getQuote(), $data->getChecks()) ) { Mage::throwException(Mage::helper('sales')->__('The requested Payment Method is not available.')); } $method->assignData($data); /* * validating the payment data */ $method->validate(); return $this; } /** * Prepare object for save * * @return Mage_Sales_Model_Quote_Payment */ protected function _beforeSave() { if ($this->getQuote()) { $this->setQuoteId($this->getQuote()->getId()); } try { $method = $this->getMethodInstance(); } catch (Mage_Core_Exception $e) { return parent::_beforeSave(); } $method->prepareSave(); return parent::_beforeSave(); } /** * Checkout redirect URL getter * * @return string */ public function getCheckoutRedirectUrl() { $method = $this->getMethodInstance(); if ($method) { return $method->getCheckoutRedirectUrl(); } return ''; } /** * Checkout order place redirect URL getter * * @return string */ public function getOrderPlaceRedirectUrl() { $method = $this->getMethodInstance(); if ($method) { return $method->getOrderPlaceRedirectUrl(); } return ''; } /** 
* Retrieve payment method model object * * @return Mage_Payment_Model_Method_Abstract */ public function getMethodInstance() { $method = parent::getMethodInstance(); return $method->setStore($this->getQuote()->getStore()); } }
gpl-2.0
holidayfun/inspectIT
inspectIT/src/info/novatec/inspectit/rcp/repository/service/storage/StorageTimerDataAccessService.java
1976
package info.novatec.inspectit.rcp.repository.service.storage; import info.novatec.inspectit.cmr.service.ITimerDataAccessService; import info.novatec.inspectit.communication.data.TimerData; import info.novatec.inspectit.indexing.aggregation.Aggregators; import info.novatec.inspectit.indexing.query.factory.impl.TimerDataQueryFactory; import info.novatec.inspectit.indexing.storage.IStorageTreeComponent; import info.novatec.inspectit.indexing.storage.impl.StorageIndexQuery; import java.util.Date; import java.util.List; /** * {@link ITimerDataAccessService} for storage purposes. * * @author Ivan Senic * */ public class StorageTimerDataAccessService extends AbstractStorageService<TimerData> implements ITimerDataAccessService { /** * Indexing tree. */ private IStorageTreeComponent<TimerData> indexingTree; /** * Index query provider. */ private TimerDataQueryFactory<StorageIndexQuery> timerDataQueryFactory; /** * {@inheritDoc} */ public List<TimerData> getAggregatedTimerData(TimerData timerData) { return this.getAggregatedTimerData(timerData, null, null); } /** * {@inheritDoc} */ public List<TimerData> getAggregatedTimerData(TimerData timerData, Date fromDate, Date toDate) { StorageIndexQuery query = timerDataQueryFactory.getAggregatedTimerDataQuery(timerData, fromDate, toDate); return super.executeQuery(query, Aggregators.TIMER_DATA_AGGREGATOR); } /** * {@inheritDoc} */ protected IStorageTreeComponent<TimerData> getIndexingTree() { return indexingTree; } /** * @param indexingTree * the indexingTree to set */ public void setIndexingTree(IStorageTreeComponent<TimerData> indexingTree) { this.indexingTree = indexingTree; } /** * @param timerDataQueryFactory * the timerDataQueryFactory to set */ public void setTimerDataQueryFactory(TimerDataQueryFactory<StorageIndexQuery> timerDataQueryFactory) { this.timerDataQueryFactory = timerDataQueryFactory; } }
agpl-3.0
upsafety/ToCiteWiki
lib/ical/iCal/Alarm.php
3081
<?php /* PHP iCal Interface Library Copyright (C) 2005 Gregory Szorc <[email protected]> This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA */ /** * iCal (RFC 2245) class definition * * PHP version 5 * * @package iCal * @author Gregory Szorc <[email protected]> * @license http://www.gnu.org/copyleft/lesser.html LGPL License 2.1 */ /** * Alaram extends BaseComponent */ require_once('BaseComponent.php'); /** * File_iCal_Alarm is the user implementation of VALARM * * @category File * @package iCal */ class File_iCal_Alarm extends File_iCal_BaseComponent { //make public from BaseComponent public function addAttachment($a) { File_iCal_BaseComponent::addAttachment($a); } public function getAttachments() { return File_iCal_BaseComponent::getAttachments(); } public function setDescription($d) { File_iCal_BaseComponent::setDescription($d); } public function getDescription() { return File_iCal_BaseComponent::getDescription(); } public function setSummary($s) { File_iCal_BaseComponent::setSummary($s); } public function getSummary() { return File_iCal_BaseComponent::getSummary(); } public function setDuration($d) { File_iCal_BaseComponent::setDuration($d); } public function getDuration() { return File_iCal_BaseComponent::getDuration(); } /** * The action to be invoked when an alarm is triggered */ protected $_action; /** * Set the action to 
be invoked by this alarm */ public function setAction($a) { } /** * Get the action to be performed by this alarm */ public function getAction() { } /** * The number of times the action should occur */ protected $_repeat; /** * Set the number of times the action should occur */ public function setRepeatCount($r) { } /** * Get the number of times the action should occur */ public function getRepeatCount() { } /** * Specified when an alarm will trigger */ protected $_trigger; /** * Set when the alarm will trigger */ public function setTrigger($t) { } /** * Get when the alarm will trigger */ public function getTrigger() { } }
lgpl-2.1
stephenc/maven
maven-plugin-api/src/main/java/org/apache/maven/plugin/descriptor/MojoDescriptor.java
19637
package org.apache.maven.plugin.descriptor; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.maven.plugin.Mojo; import org.codehaus.plexus.component.repository.ComponentDescriptor; import org.codehaus.plexus.configuration.PlexusConfiguration; import org.codehaus.plexus.configuration.xml.XmlPlexusConfiguration; /** * The bean containing the Mojo descriptor. * <br/> * For more information about the usage tag, have a look to: * <a href="http://maven.apache.org/developers/mojo-api-specification.html"> * http://maven.apache.org/developers/mojo-api-specification.html</a> * * @todo is there a need for the delegation of MavenMojoDescriptor to this? * Why not just extend ComponentDescriptor here? 
*/ public class MojoDescriptor extends ComponentDescriptor<Mojo> implements Cloneable { /** The Plexus component type */ public static final String MAVEN_PLUGIN = "maven-plugin"; /** "once-per-session" execution strategy */ public static final String SINGLE_PASS_EXEC_STRATEGY = "once-per-session"; /** "always" execution strategy */ public static final String MULTI_PASS_EXEC_STRATEGY = "always"; private static final String DEFAULT_INSTANTIATION_STRATEGY = "per-lookup"; private static final String DEFAULT_LANGUAGE = "java"; private List<Parameter> parameters; private Map<String, Parameter> parameterMap; /** By default, the execution strategy is "once-per-session" */ private String executionStrategy = SINGLE_PASS_EXEC_STRATEGY; /** * The goal name for the Mojo, that users will reference from the command line to execute the Mojo directly, or * inside a POM in order to provide Mojo-specific configuration. */ private String goal; /** * Defines a default phase to bind a mojo execution to if the user does not explicitly set a phase in the POM. * <i>Note:</i> This will not automagically make a mojo run when the plugin declaration is added to the POM. It * merely enables the user to omit the <code>&lt;phase&gt;</code> element from the surrounding * <code>&lt;execution&gt;</code> element. */ private String phase; /** Specify the version when the Mojo was added to the API. Similar to Javadoc since. */ private String since; /** Reference the invocation phase of the Mojo. */ private String executePhase; /** Reference the invocation goal of the Mojo. */ private String executeGoal; /** Reference the invocation lifecycle of the Mojo. */ private String executeLifecycle; /** * Specify the version when the Mojo was deprecated to the API. Similar to Javadoc deprecated. This will trigger a * warning when a user tries to configure a parameter marked as deprecated. */ private String deprecated; /** * Flags this Mojo to run it in a multi module way, i.e. 
aggregate the build with the set of projects listed as * modules. By default, no need to aggregate the Maven project and its child modules */ private boolean aggregator = false; // ---------------------------------------------------------------------- // // ---------------------------------------------------------------------- /** Specify the required dependencies in a specified scope */ private String dependencyResolutionRequired = null; /** * The scope of (transitive) dependencies that should be collected but not resolved. * @since 3.0-alpha-3 */ private String dependencyCollectionRequired; /** By default, the Mojo needs a Maven project to be executed */ private boolean projectRequired = true; /** By default, the Mojo is assumed to work offline as well */ private boolean onlineRequired = false; /** Plugin configuration */ private PlexusConfiguration mojoConfiguration; /** Plugin descriptor */ private PluginDescriptor pluginDescriptor; /** By default, the Mojo is inherited */ private boolean inheritedByDefault = true; /** By default, the Mojo cannot be invoked directly */ private boolean directInvocationOnly = false; /** By default, the Mojo don't need reports to run */ private boolean requiresReports = false; /** * By default, mojos are not threadsafe * @since 3.0-beta-2 */ private boolean threadSafe = false; /** * Default constructor. */ public MojoDescriptor() { setInstantiationStrategy( DEFAULT_INSTANTIATION_STRATEGY ); setComponentFactory( DEFAULT_LANGUAGE ); } // ---------------------------------------------------------------------- // // ---------------------------------------------------------------------- /** * @return the language of this Mojo, i.e. <code>java</code> */ public String getLanguage() { return getComponentFactory(); } /** * @param language the new language */ public void setLanguage( String language ) { setComponentFactory( language ); } /** * @return <code>true</code> if the Mojo is deprecated, <code>false</code> otherwise. 
*/ public String getDeprecated() { return deprecated; } /** * @param deprecated <code>true</code> to deprecate the Mojo, <code>false</code> otherwise. */ public void setDeprecated( String deprecated ) { this.deprecated = deprecated; } /** * @return the list of parameters */ public List<Parameter> getParameters() { return parameters; } /** * @param parameters the new list of parameters * @throws DuplicateParameterException if any */ public void setParameters( List<Parameter> parameters ) throws DuplicateParameterException { for ( Parameter parameter : parameters ) { addParameter( parameter ); } } /** * @param parameter add a new parameter * @throws DuplicateParameterException if any */ public void addParameter( Parameter parameter ) throws DuplicateParameterException { if ( parameters != null && parameters.contains( parameter ) ) { throw new DuplicateParameterException( parameter.getName() + " has been declared multiple times in mojo with goal: " + getGoal() + " (implementation: " + getImplementation() + ")" ); } if ( parameters == null ) { parameters = new LinkedList<>(); } parameters.add( parameter ); } /** * @return the list parameters as a Map */ public Map<String, Parameter> getParameterMap() { if ( parameterMap == null ) { parameterMap = new HashMap<>(); if ( parameters != null ) { for ( Parameter pd : parameters ) { parameterMap.put( pd.getName(), pd ); } } } return parameterMap; } // ---------------------------------------------------------------------- // Dependency requirement // ---------------------------------------------------------------------- /** * @param requiresDependencyResolution the new required dependencies in a specified scope */ public void setDependencyResolutionRequired( String requiresDependencyResolution ) { this.dependencyResolutionRequired = requiresDependencyResolution; } public String getDependencyResolutionRequired() { return dependencyResolutionRequired; } /** * @return the required dependencies in a specified scope * @TODO the 
name is not intelligible */ @Deprecated public String isDependencyResolutionRequired() { return dependencyResolutionRequired; } /** * @since 3.0-alpha-3 */ public void setDependencyCollectionRequired( String requiresDependencyCollection ) { this.dependencyCollectionRequired = requiresDependencyCollection; } /** * Gets the scope of (transitive) dependencies that should be collected. Dependency collection refers to the process * of calculating the complete dependency tree in terms of artifact coordinates. In contrast to dependency * resolution, this does not include the download of the files for the dependency artifacts. It is meant for mojos * that only want to analyze the set of transitive dependencies, in particular during early lifecycle phases where * full dependency resolution might fail due to projects which haven't been built yet. * * @return The scope of (transitive) dependencies that should be collected or {@code null} if none. * @since 3.0-alpha-3 */ public String getDependencyCollectionRequired() { return dependencyCollectionRequired; } // ---------------------------------------------------------------------- // Project requirement // ---------------------------------------------------------------------- /** * @param requiresProject <code>true</code> if the Mojo needs a Maven project to be executed, <code>false</code> * otherwise. */ public void setProjectRequired( boolean requiresProject ) { this.projectRequired = requiresProject; } /** * @return <code>true</code> if the Mojo needs a Maven project to be executed, <code>false</code> otherwise. */ public boolean isProjectRequired() { return projectRequired; } // ---------------------------------------------------------------------- // Online vs. Offline requirement // ---------------------------------------------------------------------- /** * @param requiresOnline <code>true</code> if the Mojo is online, <code>false</code> otherwise. 
*/ public void setOnlineRequired( boolean requiresOnline ) { this.onlineRequired = requiresOnline; } /** * @return <code>true</code> if the Mojo is online, <code>false</code> otherwise. */ // blech! this isn't even intelligible as a method name. provided for // consistency... public boolean isOnlineRequired() { return onlineRequired; } /** * @return <code>true</code> if the Mojo is online, <code>false</code> otherwise. */ // more english-friendly method...keep the code clean! :) public boolean requiresOnline() { return onlineRequired; } /** * @return the binded phase name of the Mojo */ public String getPhase() { return phase; } /** * @param phase the new binded phase name of the Mojo */ public void setPhase( String phase ) { this.phase = phase; } /** * @return the version when the Mojo was added to the API */ public String getSince() { return since; } /** * @param since the new version when the Mojo was added to the API */ public void setSince( String since ) { this.since = since; } /** * @return The goal name of the Mojo */ public String getGoal() { return goal; } /** * @param goal The new goal name of the Mojo */ public void setGoal( String goal ) { this.goal = goal; } /** * @return the invocation phase of the Mojo */ public String getExecutePhase() { return executePhase; } /** * @param executePhase the new invocation phase of the Mojo */ public void setExecutePhase( String executePhase ) { this.executePhase = executePhase; } /** * @return <code>true</code> if the Mojo uses <code>always</code> for the <code>executionStrategy</code> */ public boolean alwaysExecute() { return MULTI_PASS_EXEC_STRATEGY.equals( executionStrategy ); } /** * @return the execution strategy */ public String getExecutionStrategy() { return executionStrategy; } /** * @param executionStrategy the new execution strategy */ public void setExecutionStrategy( String executionStrategy ) { this.executionStrategy = executionStrategy; } /** * @return the mojo configuration */ public 
PlexusConfiguration getMojoConfiguration() { if ( mojoConfiguration == null ) { mojoConfiguration = new XmlPlexusConfiguration( "configuration" ); } return mojoConfiguration; } /** * @param mojoConfiguration a new mojo configuration */ public void setMojoConfiguration( PlexusConfiguration mojoConfiguration ) { this.mojoConfiguration = mojoConfiguration; } /** {@inheritDoc} */ public String getRole() { return Mojo.ROLE; } /** {@inheritDoc} */ public String getRoleHint() { return getId(); } /** * @return the id of the mojo, based on the goal name */ public String getId() { return getPluginDescriptor().getId() + ":" + getGoal(); } /** * @return the full goal name * @see PluginDescriptor#getGoalPrefix() * @see #getGoal() */ public String getFullGoalName() { return getPluginDescriptor().getGoalPrefix() + ":" + getGoal(); } /** {@inheritDoc} */ public String getComponentType() { return MAVEN_PLUGIN; } /** * @return the plugin descriptor */ public PluginDescriptor getPluginDescriptor() { return pluginDescriptor; } /** * @param pluginDescriptor the new plugin descriptor */ public void setPluginDescriptor( PluginDescriptor pluginDescriptor ) { this.pluginDescriptor = pluginDescriptor; } /** * @return <code>true</code> if the Mojo is herited, <code>false</code> otherwise. */ public boolean isInheritedByDefault() { return inheritedByDefault; } /** * @param inheritedByDefault <code>true</code> if the Mojo is herited, <code>false</code> otherwise. 
*/ public void setInheritedByDefault( boolean inheritedByDefault ) { this.inheritedByDefault = inheritedByDefault; } /** {@inheritDoc} */ public boolean equals( Object object ) { if ( this == object ) { return true; } if ( object instanceof MojoDescriptor ) { MojoDescriptor other = (MojoDescriptor) object; if ( !compareObjects( getPluginDescriptor(), other.getPluginDescriptor() ) ) { return false; } return compareObjects( getGoal(), other.getGoal() ); } return false; } private boolean compareObjects( Object first, Object second ) { if ( first == second ) { return true; } if ( first == null || second == null ) { return false; } return first.equals( second ); } /** {@inheritDoc} */ public int hashCode() { int result = 1; String goal = getGoal(); if ( goal != null ) { result += goal.hashCode(); } PluginDescriptor pd = getPluginDescriptor(); if ( pd != null ) { result -= pd.hashCode(); } return result; } /** * @return the invocation lifecycle of the Mojo */ public String getExecuteLifecycle() { return executeLifecycle; } /** * @param executeLifecycle the new invocation lifecycle of the Mojo */ public void setExecuteLifecycle( String executeLifecycle ) { this.executeLifecycle = executeLifecycle; } /** * @param aggregator <code>true</code> if the Mojo uses the Maven project and its child modules, * <code>false</code> otherwise. */ public void setAggregator( boolean aggregator ) { this.aggregator = aggregator; } /** * @return <code>true</code> if the Mojo uses the Maven project and its child modules, * <code>false</code> otherwise. */ public boolean isAggregator() { return aggregator; } /** * @return <code>true</code> if the Mojo cannot be invoked directly, <code>false</code> otherwise. */ public boolean isDirectInvocationOnly() { return directInvocationOnly; } /** * @param directInvocationOnly <code>true</code> if the Mojo cannot be invoked directly, * <code>false</code> otherwise. 
*/ public void setDirectInvocationOnly( boolean directInvocationOnly ) { this.directInvocationOnly = directInvocationOnly; } /** * @return <code>true</code> if the Mojo needs reports to run, <code>false</code> otherwise. */ public boolean isRequiresReports() { return requiresReports; } /** * @param requiresReports <code>true</code> if the Mojo needs reports to run, <code>false</code> otherwise. */ public void setRequiresReports( boolean requiresReports ) { this.requiresReports = requiresReports; } /** * @param executeGoal the new invocation goal of the Mojo */ public void setExecuteGoal( String executeGoal ) { this.executeGoal = executeGoal; } /** * @return the invocation goal of the Mojo */ public String getExecuteGoal() { return executeGoal; } /** * @return True if the <code>Mojo</code> is thread-safe and can be run safely in parallel * @since 3.0-beta-2 */ public boolean isThreadSafe() { return threadSafe; } /** * @param threadSafe indicates that the mojo is thread-safe and can be run safely in parallel * @since 3.0-beta-2 */ public void setThreadSafe( boolean threadSafe ) { this.threadSafe = threadSafe; } /** * @return {@code true} if this mojo forks either a goal or the lifecycle, {@code false} otherwise. */ public boolean isForking() { return ( getExecuteGoal() != null && getExecuteGoal().length() > 0 ) || ( getExecutePhase() != null && getExecutePhase().length() > 0 ); } /** * Creates a shallow copy of this mojo descriptor. */ @Override public MojoDescriptor clone() { try { return (MojoDescriptor) super.clone(); } catch ( CloneNotSupportedException e ) { throw new UnsupportedOperationException( e ); } } }
apache-2.0
winger007/zstack
plugin/virtualRouterProvider/src/main/java/org/zstack/network/service/virtualrouter/portforwarding/PortForwardingRemoveVirtualRouterFirewallFlow.java
2697
package org.zstack.network.service.virtualrouter.portforwarding; import org.springframework.beans.factory.annotation.Autowire; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Configurable; import org.zstack.appliancevm.ApplianceVmFacade; import org.zstack.appliancevm.ApplianceVmFirewallRuleInventory; import org.zstack.header.core.workflow.Flow; import org.zstack.header.core.workflow.FlowRollback; import org.zstack.header.core.workflow.FlowTrigger; import org.zstack.header.core.Completion; import org.zstack.header.errorcode.ErrorCode; import org.zstack.network.service.virtualrouter.VirtualRouterConstant; import org.zstack.network.service.virtualrouter.VirtualRouterVmInventory; import org.zstack.utils.Utils; import org.zstack.utils.gson.JSONObjectUtil; import org.zstack.utils.logging.CLogger; import java.util.Arrays; import java.util.Map; /** */ @Configurable(preConstruction = true, autowire = Autowire.BY_TYPE) public class PortForwardingRemoveVirtualRouterFirewallFlow implements Flow { private static final CLogger logger = Utils.getLogger(PortForwardingRemoveVirtualRouterFirewallFlow.class); @Autowired private ApplianceVmFacade apvmf; @Override public void run(final FlowTrigger trigger, final Map data) { final PortForwardingRuleTO to = (PortForwardingRuleTO) data.get(VirtualRouterConstant.VR_PORT_FORWARDING_RULE); final VirtualRouterVmInventory vr = (VirtualRouterVmInventory) data.get(VirtualRouterConstant.VR_RESULT_VM); final String targetL3 = (String) data.get(VirtualRouterConstant.VR_VIP_L3NETWORK); final ApplianceVmFirewallRuleInventory rule = new ApplianceVmFirewallRuleInventory(); rule.setProtocol(to.getProtocolType().toLowerCase()); rule.setDestIp(to.getVipIp()); rule.setEndPort(to.getPrivatePortEnd()); rule.setStartPort(to.getPrivatePortStart()); rule.setAllowCidr(to.getAllowedCidr()); apvmf.removeFirewall(vr.getUuid(), targetL3, Arrays.asList(rule), new Completion(trigger) { @Override public 
void success() { logger.debug(String.format("successfully removed firewall on virtual route[uuid:%s, name:%s] for port forwarding, firewall rule: %s", vr.getUuid(), vr.getName(), JSONObjectUtil.toJsonString(rule))); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } @Override public void rollback(final FlowRollback trigger, Map data) { trigger.rollback(); } }
apache-2.0
minestarks/TypeScript
tests/cases/fourslash/formattingOnSingleLineBlocks.ts
167
/// <reference path='fourslash.ts' /> ////class C ////{} ////if (true) ////{} format.document(); verify.currentFileContentIs( `class C { } if (true) { }`);
apache-2.0
ensouza93/quickstart
wsba-participant-completion-simple/src/test/java/org/jboss/as/quickstarts/wsba/participantcompletion/simple/ClientStub.java
1572
/* * JBoss, Home of Professional Open Source * Copyright 2013, Red Hat, Inc. and/or its affiliates, and individual * contributors by the @authors tag. See the copyright.txt in the * distribution for a full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.as.quickstarts.wsba.participantcompletion.simple; import javax.inject.Qualifier; import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.FIELD; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** * Qualifier for specifying which SetServiceBA implementation to use. * * This Qualifier only supports the ClientStub implementation. The other class to implement SetServiceBA is the SetServiceBAImpl * class, which is not injected by CDI, so does not require Qualifier support. * * @author [email protected], 2012-01-04 */ @Qualifier @Retention(RUNTIME) @Target({ TYPE, FIELD }) public @interface ClientStub { }
apache-2.0
bonifacechacha/spring-android-samples
spring-android-showcase/client/src/org/springframework/android/showcase/AbstractAsyncListActivity.java
1877
/* * Copyright 2010-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.android.showcase; import android.app.ListActivity; import android.app.ProgressDialog; /** * @author Roy Clarkson * @author Pierre-Yves Ricau */ public abstract class AbstractAsyncListActivity extends ListActivity implements AsyncActivity { protected static final String TAG = AbstractAsyncActivity.class.getSimpleName(); private ProgressDialog progressDialog; private boolean destroyed = false; // *************************************** // Activity methods // *************************************** @Override protected void onDestroy() { super.onDestroy(); this.destroyed = true; } // *************************************** // Public methods // *************************************** public void showLoadingProgressDialog() { this.showProgressDialog("Loading. Please wait..."); } public void showProgressDialog(CharSequence message) { if (this.progressDialog == null) { this.progressDialog = new ProgressDialog(this); this.progressDialog.setIndeterminate(true); } this.progressDialog.setMessage(message); this.progressDialog.show(); } public void dismissProgressDialog() { if (this.progressDialog != null && !this.destroyed) { this.progressDialog.dismiss(); } } }
apache-2.0
binhn/fabric
vendor/github.com/Shopify/sarama/decompress.go
1240
package sarama import ( "bytes" "compress/gzip" "fmt" "io/ioutil" "sync" "github.com/eapache/go-xerial-snappy" "github.com/pierrec/lz4" ) var ( lz4ReaderPool = sync.Pool{ New: func() interface{} { return lz4.NewReader(nil) }, } gzipReaderPool sync.Pool ) func decompress(cc CompressionCodec, data []byte) ([]byte, error) { switch cc { case CompressionNone: return data, nil case CompressionGZIP: var ( err error reader *gzip.Reader readerIntf = gzipReaderPool.Get() ) if readerIntf != nil { reader = readerIntf.(*gzip.Reader) } else { reader, err = gzip.NewReader(bytes.NewReader(data)) if err != nil { return nil, err } } defer gzipReaderPool.Put(reader) if err := reader.Reset(bytes.NewReader(data)); err != nil { return nil, err } return ioutil.ReadAll(reader) case CompressionSnappy: return snappy.Decode(data) case CompressionLZ4: reader := lz4ReaderPool.Get().(*lz4.Reader) defer lz4ReaderPool.Put(reader) reader.Reset(bytes.NewReader(data)) return ioutil.ReadAll(reader) case CompressionZSTD: return zstdDecompress(nil, data) default: return nil, PacketDecodingError{fmt.Sprintf("invalid compression specified (%d)", cc)} } }
apache-2.0
tomzhang/kubernetes
pkg/kubelet/server/stats/summary.go
4866
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package stats import ( "fmt" "k8s.io/klog/v2" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" statsapi "k8s.io/kubelet/pkg/apis/stats/v1alpha1" "k8s.io/kubernetes/pkg/kubelet/util" ) // SummaryProvider provides summaries of the stats from Kubelet. type SummaryProvider interface { // Get provides a new Summary with the stats from Kubelet, // and will update some stats if updateStats is true Get(updateStats bool) (*statsapi.Summary, error) // GetCPUAndMemoryStats provides a new Summary with the CPU and memory stats from Kubelet, GetCPUAndMemoryStats() (*statsapi.Summary, error) } // summaryProviderImpl implements the SummaryProvider interface. type summaryProviderImpl struct { // kubeletCreationTime is the time at which the summaryProvider was created. kubeletCreationTime metav1.Time // systemBootTime is the time at which the system was started systemBootTime metav1.Time provider Provider } var _ SummaryProvider = &summaryProviderImpl{} // NewSummaryProvider returns a SummaryProvider using the stats provided by the // specified statsProvider. func NewSummaryProvider(statsProvider Provider) SummaryProvider { kubeletCreationTime := metav1.Now() bootTime, err := util.GetBootTime() if err != nil { // bootTime will be zero if we encounter an error getting the boot time. klog.Warningf("Error getting system boot time. 
Node metrics will have an incorrect start time: %v", err) } return &summaryProviderImpl{ kubeletCreationTime: kubeletCreationTime, systemBootTime: metav1.NewTime(bootTime), provider: statsProvider, } } func (sp *summaryProviderImpl) Get(updateStats bool) (*statsapi.Summary, error) { // TODO(timstclair): Consider returning a best-effort response if any of // the following errors occur. node, err := sp.provider.GetNode() if err != nil { return nil, fmt.Errorf("failed to get node info: %v", err) } nodeConfig := sp.provider.GetNodeConfig() rootStats, networkStats, err := sp.provider.GetCgroupStats("/", updateStats) if err != nil { return nil, fmt.Errorf("failed to get root cgroup stats: %v", err) } rootFsStats, err := sp.provider.RootFsStats() if err != nil { return nil, fmt.Errorf("failed to get rootFs stats: %v", err) } imageFsStats, err := sp.provider.ImageFsStats() if err != nil { return nil, fmt.Errorf("failed to get imageFs stats: %v", err) } var podStats []statsapi.PodStats if updateStats { podStats, err = sp.provider.ListPodStatsAndUpdateCPUNanoCoreUsage() } else { podStats, err = sp.provider.ListPodStats() } if err != nil { return nil, fmt.Errorf("failed to list pod stats: %v", err) } rlimit, err := sp.provider.RlimitStats() if err != nil { return nil, fmt.Errorf("failed to get rlimit stats: %v", err) } nodeStats := statsapi.NodeStats{ NodeName: node.Name, CPU: rootStats.CPU, Memory: rootStats.Memory, Network: networkStats, StartTime: sp.systemBootTime, Fs: rootFsStats, Runtime: &statsapi.RuntimeStats{ImageFs: imageFsStats}, Rlimit: rlimit, SystemContainers: sp.GetSystemContainersStats(nodeConfig, podStats, updateStats), } summary := statsapi.Summary{ Node: nodeStats, Pods: podStats, } return &summary, nil } func (sp *summaryProviderImpl) GetCPUAndMemoryStats() (*statsapi.Summary, error) { // TODO(timstclair): Consider returning a best-effort response if any of // the following errors occur. 
node, err := sp.provider.GetNode() if err != nil { return nil, fmt.Errorf("failed to get node info: %v", err) } nodeConfig := sp.provider.GetNodeConfig() rootStats, err := sp.provider.GetCgroupCPUAndMemoryStats("/", false) if err != nil { return nil, fmt.Errorf("failed to get root cgroup stats: %v", err) } podStats, err := sp.provider.ListPodCPUAndMemoryStats() if err != nil { return nil, fmt.Errorf("failed to list pod stats: %v", err) } nodeStats := statsapi.NodeStats{ NodeName: node.Name, CPU: rootStats.CPU, Memory: rootStats.Memory, StartTime: rootStats.StartTime, SystemContainers: sp.GetSystemContainersCPUAndMemoryStats(nodeConfig, podStats, false), } summary := statsapi.Summary{ Node: nodeStats, Pods: podStats, } return &summary, nil }
apache-2.0
c-knowles/kube-aws
vendor/github.com/aws/aws-sdk-go/service/migrationhub/service.go
3188
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT. package migrationhub import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/client" "github.com/aws/aws-sdk-go/aws/client/metadata" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/aws/signer/v4" "github.com/aws/aws-sdk-go/private/protocol/jsonrpc" ) // MigrationHub provides the API operation methods for making requests to // AWS Migration Hub. See this package's package overview docs // for details on the service. // // MigrationHub methods are safe to use concurrently. It is not safe to // modify mutate any of the struct's properties though. type MigrationHub struct { *client.Client } // Used for custom client initialization logic var initClient func(*client.Client) // Used for custom request initialization logic var initRequest func(*request.Request) // Service information constants const ( ServiceName = "mgh" // Name of service. EndpointsID = ServiceName // ID to lookup a service endpoint with. ServiceID = "Migration Hub" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the MigrationHub client with a session. // If additional configuration is needed for the client instance use the optional // aws.Config parameter to add your extra config. // // Example: // // Create a MigrationHub client from just a session. // svc := migrationhub.New(mySession) // // // Create a MigrationHub client with additional configuration // svc := migrationhub.New(mySession, aws.NewConfig().WithRegion("us-west-2")) func New(p client.ConfigProvider, cfgs ...*aws.Config) *MigrationHub { c := p.ClientConfig(EndpointsID, cfgs...) return newClient(*c.Config, c.Handlers, c.Endpoint, c.SigningRegion, c.SigningName) } // newClient creates, initializes and returns a new service client instance. 
func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegion, signingName string) *MigrationHub { svc := &MigrationHub{ Client: client.New( cfg, metadata.ClientInfo{ ServiceName: ServiceName, ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, APIVersion: "2017-05-31", JSONVersion: "1.1", TargetPrefix: "AWSMigrationHub", }, handlers, ), } // Handlers svc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler) svc.Handlers.Build.PushBackNamed(jsonrpc.BuildHandler) svc.Handlers.Unmarshal.PushBackNamed(jsonrpc.UnmarshalHandler) svc.Handlers.UnmarshalMeta.PushBackNamed(jsonrpc.UnmarshalMetaHandler) svc.Handlers.UnmarshalError.PushBackNamed(jsonrpc.UnmarshalErrorHandler) // Run custom client initialization if present if initClient != nil { initClient(svc.Client) } return svc } // newRequest creates a new request for a MigrationHub operation and runs any // custom request initialization. func (c *MigrationHub) newRequest(op *request.Operation, params, data interface{}) *request.Request { req := c.NewRequest(op, params, data) // Run custom request initialization if present if initRequest != nil { initRequest(req) } return req }
apache-2.0
gfyoung/elasticsearch
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/OrderBy.java
1683
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.sql.plan.logical; import java.util.List; import java.util.Objects; import org.elasticsearch.xpack.sql.capabilities.Resolvables; import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class OrderBy extends UnaryPlan { private final List<Order> order; public OrderBy(Location location, LogicalPlan child, List<Order> order) { super(location, child); this.order = order; } @Override protected NodeInfo<OrderBy> info() { return NodeInfo.create(this, OrderBy::new, child(), order); } @Override protected OrderBy replaceChild(LogicalPlan newChild) { return new OrderBy(location(), newChild, order); } public List<Order> order() { return order; } @Override public boolean expressionsResolved() { return Resolvables.resolved(order); } @Override public int hashCode() { return Objects.hash(order, child()); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } OrderBy other = (OrderBy) obj; return Objects.equals(order, other.order) && Objects.equals(child(), other.child()); } }
apache-2.0
shahrzadmn/vaadin
uitest/src/com/vaadin/tests/tickets/Ticket2204.java
5640
package com.vaadin.tests.tickets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import com.vaadin.data.Item; import com.vaadin.data.util.BeanItem; import com.vaadin.server.LegacyApplication; import com.vaadin.ui.AbstractOrderedLayout; import com.vaadin.ui.AbstractSplitPanel; import com.vaadin.ui.Accordion; import com.vaadin.ui.Button; import com.vaadin.ui.Button.ClickEvent; import com.vaadin.ui.Button.ClickListener; import com.vaadin.ui.Component; import com.vaadin.ui.ComponentContainer; import com.vaadin.ui.Field; import com.vaadin.ui.Form; import com.vaadin.ui.FormFieldFactory; import com.vaadin.ui.GridLayout; import com.vaadin.ui.HorizontalLayout; import com.vaadin.ui.HorizontalSplitPanel; import com.vaadin.ui.Label; import com.vaadin.ui.LegacyWindow; import com.vaadin.ui.Panel; import com.vaadin.ui.RichTextArea; import com.vaadin.ui.TabSheet; import com.vaadin.ui.VerticalLayout; import com.vaadin.ui.VerticalSplitPanel; public class Ticket2204 extends LegacyApplication { private final List<RichTextArea> textAreas = new ArrayList<RichTextArea>(); private TabSheet ts; private final Map<Component, Component> containerToComponent = new HashMap<Component, Component>(); private RichTextArea rta; private final List<Class<? extends Component>> classes = new ArrayList<Class<? 
extends Component>>(); protected RichTextArea formTextArea; @Override public void init() { classes.add(VerticalLayout.class); classes.add(HorizontalLayout.class); classes.add(GridLayout.class); classes.add(Accordion.class); classes.add(TabSheet.class); classes.add(Panel.class); classes.add(VerticalSplitPanel.class); classes.add(HorizontalSplitPanel.class); classes.add(Form.class); LegacyWindow w = new LegacyWindow(getClass().getSimpleName()); setMainWindow(w); // setTheme("tests-tickets"); createUI((AbstractOrderedLayout) w.getContent()); } private void createUI(AbstractOrderedLayout layout) { ts = new TabSheet(); layout.addComponent(ts); for (Class<? extends Component> c : classes) { ts.addTab(createComponent(c), c.getSimpleName(), null); } rta = new RichTextArea(); rta.setVisible(false); ts.addTab(rta, "Hidden rta", null); Button b = new Button("Show area", new ClickListener() { @Override public void buttonClick(ClickEvent event) { showHide(); } }); layout.addComponent(b); b = new Button("Show tab", new ClickListener() { @Override public void buttonClick(ClickEvent event) { showTab(); } }); layout.addComponent(b); } protected void showTab() { rta.setVisible(!rta.isVisible()); } protected void showHide() { Component c = containerToComponent.get(ts.getSelectedTab()); c.setVisible(!c.isVisible()); } private Component createComponent(Class<? 
extends Component> c) { RichTextArea textArea = new RichTextArea(); textArea.setVisible(false); textArea.setCaption("This is the textArea"); textArea.setWidth("200px"); textArea.setHeight("100px"); textAreas.add(textArea); Component cc = null; try { cc = c.newInstance(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); return null; } // if (c == OrderedLayout.class) { // cc = new VerticalLayout(); // } else if (c == Accordion.class) { // Label l = new Label("Filler"); // l.setCaption("Filler label"); // cc.addComponent(l); } if (c == Form.class) { Form f = (Form) cc; f.setFormFieldFactory(new FormFieldFactory() { @Override public Field<?> createField(Item item, Object propertyId, Component uiContext) { formTextArea = new RichTextArea(); formTextArea.setVisible(false); return formTextArea; } }); f.setItemDataSource(new BeanItem<Object>(new Object() { private int a; @SuppressWarnings("unused") public int getA() { return a; } @SuppressWarnings("unused") public void setA(int a) { this.a = a; } })); containerToComponent.put(f, formTextArea); return f; } containerToComponent.put(cc, textArea); if (cc instanceof ComponentContainer) { ((ComponentContainer) cc).addComponent(textArea); } if (AbstractSplitPanel.class.isAssignableFrom(c)) { AbstractSplitPanel sp = (AbstractSplitPanel) cc; sp.setWidth("300px"); sp.setHeight("300px"); sp.addComponent(new Label("Label")); textArea.setSizeFull(); } if (c == Panel.class) { VerticalLayout layout = new VerticalLayout(); layout.setMargin(true); ((Panel) cc).setContent(layout); containerToComponent.put(cc, layout); layout.setVisible(false); textArea.setVisible(true); return cc; } return cc; } }
apache-2.0
hwstreaming/flink
flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/NullValueSerializer.java
2268
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.api.common.typeutils.base; import org.apache.flink.annotation.Internal; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.DataOutputView; import org.apache.flink.types.NullValue; import java.io.IOException; @Internal public final class NullValueSerializer extends TypeSerializerSingleton<NullValue> { private static final long serialVersionUID = 1L; public static final NullValueSerializer INSTANCE = new NullValueSerializer(); @Override public boolean isImmutableType() { return false; } @Override public NullValue createInstance() { return NullValue.getInstance(); } @Override public NullValue copy(NullValue from) { return NullValue.getInstance(); } @Override public NullValue copy(NullValue from, NullValue reuse) { return NullValue.getInstance(); } @Override public int getLength() { return 0; } @Override public void serialize(NullValue record, DataOutputView target) throws IOException { } @Override public NullValue deserialize(DataInputView source) throws IOException { return NullValue.getInstance(); } @Override public NullValue deserialize(NullValue reuse, DataInputView source) throws IOException { return NullValue.getInstance(); } 
@Override public void copy(DataInputView source, DataOutputView target) throws IOException { } @Override public boolean canEqual(Object obj) { return obj instanceof NullValueSerializer; } }
apache-2.0
coding0011/elasticsearch
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java
9075
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.security.authc.kerberos; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData; import org.ietf.jgss.GSSException; import javax.security.auth.login.LoginException; import java.io.IOException; import java.nio.file.Path; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.AdditionalMatchers.aryEq; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; public class KerberosRealmCacheTests extends KerberosRealmTestCase { public void testAuthenticateWithCache() throws LoginException, GSSException { final String username = randomPrincipalName(); final String outToken = randomAlphaOfLength(10); final KerberosRealm kerberosRealm = createKerberosRealm(username); final String expectedUsername = maybeRemoveRealmName(username); final Map<String, Object> metadata = new HashMap<>(); metadata.put(KerberosRealm.KRB_METADATA_REALM_NAME_KEY, realmName(username)); metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, 
roles.toArray(new String[roles.size()]), null, null, metadata, true); final byte[] decodedTicket = randomByteArrayOfLength(10); final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); // authenticate final User user1 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); // authenticate with cache final User user2 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); assertThat(user1, sameInstance(user2)); verify(mockKerberosTicketValidator, times(2)).validateTicket(aryEq(decodedTicket), eq(keytabPath), eq(krbDebug), any(ActionListener.class)); verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm); verify(mockNativeRoleMappingStore).resolveRoles(any(UserData.class), any(ActionListener.class)); verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore); } public void testCacheInvalidationScenarios() throws LoginException, GSSException { final String outToken = randomAlphaOfLength(10); final List<String> userNames = Arrays.asList(randomPrincipalName(), randomPrincipalName()); final KerberosRealm kerberosRealm = createKerberosRealm(userNames.toArray(new String[0])); verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm); final String authNUsername = randomFrom(userNames); final byte[] decodedTicket = randomByteArrayOfLength(10); final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); 
mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(authNUsername, outToken), null); final String expectedUsername = maybeRemoveRealmName(authNUsername); final Map<String, Object> metadata = new HashMap<>(); metadata.put(KerberosRealm.KRB_METADATA_REALM_NAME_KEY, realmName(authNUsername)); metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, authNUsername); final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, metadata, true); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); final User user1 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); final String expireThisUser = randomFrom(userNames); boolean expireAll = randomBoolean(); if (expireAll) { kerberosRealm.expireAll(); } else { kerberosRealm.expire(maybeRemoveRealmName(expireThisUser)); } final User user2 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); if (expireAll || expireThisUser.equals(authNUsername)) { assertThat(user1, is(not(sameInstance(user2)))); verify(mockNativeRoleMappingStore, times(2)).resolveRoles(any(UserData.class), any(ActionListener.class)); } else { assertThat(user1, sameInstance(user2)); verify(mockNativeRoleMappingStore).resolveRoles(any(UserData.class), any(ActionListener.class)); } verify(mockKerberosTicketValidator, times(2)).validateTicket(aryEq(decodedTicket), eq(keytabPath), eq(krbDebug), any(ActionListener.class)); verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore); } public void testAuthenticateWithValidTicketSucessAuthnWithUserDetailsWhenCacheDisabled() throws LoginException, GSSException, IOException { // if cache.ttl <= 0 then the cache is disabled settings = buildKerberosRealmSettings(REALM_NAME, writeKeyTab(dir.resolve("key.keytab"), randomAlphaOfLength(4)).toString(), 100, "0m", true, randomBoolean()); final 
String username = randomPrincipalName(); final String outToken = randomAlphaOfLength(10); final KerberosRealm kerberosRealm = createKerberosRealm(username); final String expectedUsername = maybeRemoveRealmName(username); final Map<String, Object> metadata = new HashMap<>(); metadata.put(KerberosRealm.KRB_METADATA_REALM_NAME_KEY, realmName(username)); metadata.put(KerberosRealm.KRB_METADATA_UPN_KEY, username); final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, metadata, true); final byte[] decodedTicket = randomByteArrayOfLength(10); final Path keytabPath = config.env().configFile().resolve(config.getSetting(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH)); final boolean krbDebug = config.getSetting(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE); mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); // authenticate final User user1 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); // authenticate when cache has been disabled final User user2 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); assertThat(user1, not(sameInstance(user2))); verify(mockKerberosTicketValidator, times(2)).validateTicket(aryEq(decodedTicket), eq(keytabPath), eq(krbDebug), any(ActionListener.class)); verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm); verify(mockNativeRoleMappingStore, times(2)).resolveRoles(any(UserData.class), any(ActionListener.class)); verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore); } private User authenticateAndAssertResult(final KerberosRealm kerberosRealm, final User expectedUser, final KerberosAuthenticationToken kerberosAuthenticationToken, String outToken) { final 
PlainActionFuture<AuthenticationResult> future = PlainActionFuture.newFuture(); kerberosRealm.authenticate(kerberosAuthenticationToken, future); final AuthenticationResult result = future.actionGet(); assertSuccessAuthenticationResult(expectedUser, outToken, result); return result.getUser(); } }
apache-2.0
c9n/hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AppSchedulingInfo.java
16764
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState; import org.apache.hadoop.yarn.util.resource.Resources; /** * This class keeps track of all the 
consumption of an application. This also * keeps track of current running/completed containers for the application. */ @Private @Unstable public class AppSchedulingInfo { private static final Log LOG = LogFactory.getLog(AppSchedulingInfo.class); private final ApplicationAttemptId applicationAttemptId; final ApplicationId applicationId; private String queueName; Queue queue; final String user; // TODO making containerIdCounter long private final AtomicLong containerIdCounter; private final int EPOCH_BIT_SHIFT = 40; final Set<Priority> priorities = new TreeSet<Priority>( new org.apache.hadoop.yarn.server.resourcemanager.resource.Priority.Comparator()); final Map<Priority, Map<String, ResourceRequest>> requests = new HashMap<Priority, Map<String, ResourceRequest>>(); private Set<String> blacklist = new HashSet<String>(); //private final ApplicationStore store; private ActiveUsersManager activeUsersManager; /* Allocated by scheduler */ boolean pending = true; // for app metrics public AppSchedulingInfo(ApplicationAttemptId appAttemptId, String user, Queue queue, ActiveUsersManager activeUsersManager, long epoch) { this.applicationAttemptId = appAttemptId; this.applicationId = appAttemptId.getApplicationId(); this.queue = queue; this.queueName = queue.getQueueName(); this.user = user; this.activeUsersManager = activeUsersManager; this.containerIdCounter = new AtomicLong(epoch << EPOCH_BIT_SHIFT); } public ApplicationId getApplicationId() { return applicationId; } public ApplicationAttemptId getApplicationAttemptId() { return applicationAttemptId; } public String getQueueName() { return queueName; } public String getUser() { return user; } public synchronized boolean isPending() { return pending; } /** * Clear any pending requests from this application. 
*/ private synchronized void clearRequests() { priorities.clear(); requests.clear(); LOG.info("Application " + applicationId + " requests cleared"); } public long getNewContainerId() { return this.containerIdCounter.incrementAndGet(); } /** * The ApplicationMaster is updating resource requirements for the * application, by asking for more resources and releasing resources acquired * by the application. * * @param requests resources to be acquired * @param recoverPreemptedRequest recover Resource Request on preemption */ synchronized public void updateResourceRequests( List<ResourceRequest> requests, boolean recoverPreemptedRequest) { QueueMetrics metrics = queue.getMetrics(); // Update resource requests for (ResourceRequest request : requests) { Priority priority = request.getPriority(); String resourceName = request.getResourceName(); boolean updatePendingResources = false; ResourceRequest lastRequest = null; if (resourceName.equals(ResourceRequest.ANY)) { if (LOG.isDebugEnabled()) { LOG.debug("update:" + " application=" + applicationId + " request=" + request); } updatePendingResources = true; // Premature optimization? // Assumes that we won't see more than one priority request updated // in one call, reasonable assumption... however, it's totally safe // to activate same application more than once. // Thus we don't need another loop ala the one in decrementOutstanding() // which is needed during deactivate. if (request.getNumContainers() > 0) { activeUsersManager.activateApplication(user, applicationId); } } Map<String, ResourceRequest> asks = this.requests.get(priority); if (asks == null) { asks = new HashMap<String, ResourceRequest>(); this.requests.put(priority, asks); this.priorities.add(priority); } lastRequest = asks.get(resourceName); if (recoverPreemptedRequest && lastRequest != null) { // Increment the number of containers to 1, as it is recovering a // single container. 
request.setNumContainers(lastRequest.getNumContainers() + 1); } asks.put(resourceName, request); if (updatePendingResources) { // Similarly, deactivate application? if (request.getNumContainers() <= 0) { LOG.info("checking for deactivate... "); checkForDeactivation(); } int lastRequestContainers = lastRequest != null ? lastRequest .getNumContainers() : 0; Resource lastRequestCapability = lastRequest != null ? lastRequest .getCapability() : Resources.none(); metrics.incrPendingResources(user, request.getNumContainers(), request.getCapability()); metrics.decrPendingResources(user, lastRequestContainers, lastRequestCapability); } } } /** * The ApplicationMaster is updating the blacklist * * @param blacklistAdditions resources to be added to the blacklist * @param blacklistRemovals resources to be removed from the blacklist */ synchronized public void updateBlacklist( List<String> blacklistAdditions, List<String> blacklistRemovals) { // Add to blacklist if (blacklistAdditions != null) { blacklist.addAll(blacklistAdditions); } // Remove from blacklist if (blacklistRemovals != null) { blacklist.removeAll(blacklistRemovals); } } synchronized public Collection<Priority> getPriorities() { return priorities; } synchronized public Map<String, ResourceRequest> getResourceRequests( Priority priority) { return requests.get(priority); } synchronized public List<ResourceRequest> getAllResourceRequests() { List<ResourceRequest> ret = new ArrayList<ResourceRequest>(); for (Map<String, ResourceRequest> r : requests.values()) { ret.addAll(r.values()); } return ret; } synchronized public ResourceRequest getResourceRequest(Priority priority, String resourceName) { Map<String, ResourceRequest> nodeRequests = requests.get(priority); return (nodeRequests == null) ? 
null : nodeRequests.get(resourceName); } public synchronized Resource getResource(Priority priority) { ResourceRequest request = getResourceRequest(priority, ResourceRequest.ANY); return request.getCapability(); } public synchronized boolean isBlacklisted(String resourceName) { return blacklist.contains(resourceName); } /** * Resources have been allocated to this application by the resource * scheduler. Track them. * * @param type * the type of the node * @param node * the nodeinfo of the node * @param priority * the priority of the request. * @param request * the request * @param container * the containers allocated. */ synchronized public List<ResourceRequest> allocate(NodeType type, SchedulerNode node, Priority priority, ResourceRequest request, Container container) { List<ResourceRequest> resourceRequests = new ArrayList<ResourceRequest>(); if (type == NodeType.NODE_LOCAL) { allocateNodeLocal(node, priority, request, container, resourceRequests); } else if (type == NodeType.RACK_LOCAL) { allocateRackLocal(node, priority, request, container, resourceRequests); } else { allocateOffSwitch(node, priority, request, container, resourceRequests); } QueueMetrics metrics = queue.getMetrics(); if (pending) { // once an allocation is done we assume the application is // running from scheduler's POV. pending = false; metrics.runAppAttempt(applicationId, user); } if (LOG.isDebugEnabled()) { LOG.debug("allocate: applicationId=" + applicationId + " container=" + container.getId() + " host=" + container.getNodeId().toString() + " user=" + user + " resource=" + request.getCapability()); } metrics.allocateResources(user, 1, request.getCapability(), true); return resourceRequests; } /** * The {@link ResourceScheduler} is allocating data-local resources to the * application. 
* * @param allocatedContainers * resources allocated to the application */ synchronized private void allocateNodeLocal(SchedulerNode node, Priority priority, ResourceRequest nodeLocalRequest, Container container, List<ResourceRequest> resourceRequests) { // Update future requirements nodeLocalRequest.setNumContainers(nodeLocalRequest.getNumContainers() - 1); if (nodeLocalRequest.getNumContainers() == 0) { this.requests.get(priority).remove(node.getNodeName()); } ResourceRequest rackLocalRequest = requests.get(priority).get( node.getRackName()); rackLocalRequest.setNumContainers(rackLocalRequest.getNumContainers() - 1); if (rackLocalRequest.getNumContainers() == 0) { this.requests.get(priority).remove(node.getRackName()); } ResourceRequest offRackRequest = requests.get(priority).get( ResourceRequest.ANY); decrementOutstanding(offRackRequest); // Update cloned NodeLocal, RackLocal and OffRack requests for recovery resourceRequests.add(cloneResourceRequest(nodeLocalRequest)); resourceRequests.add(cloneResourceRequest(rackLocalRequest)); resourceRequests.add(cloneResourceRequest(offRackRequest)); } /** * The {@link ResourceScheduler} is allocating data-local resources to the * application. 
* * @param allocatedContainers * resources allocated to the application */ synchronized private void allocateRackLocal(SchedulerNode node, Priority priority, ResourceRequest rackLocalRequest, Container container, List<ResourceRequest> resourceRequests) { // Update future requirements rackLocalRequest.setNumContainers(rackLocalRequest.getNumContainers() - 1); if (rackLocalRequest.getNumContainers() == 0) { this.requests.get(priority).remove(node.getRackName()); } ResourceRequest offRackRequest = requests.get(priority).get( ResourceRequest.ANY); decrementOutstanding(offRackRequest); // Update cloned RackLocal and OffRack requests for recovery resourceRequests.add(cloneResourceRequest(rackLocalRequest)); resourceRequests.add(cloneResourceRequest(offRackRequest)); } /** * The {@link ResourceScheduler} is allocating data-local resources to the * application. * * @param allocatedContainers * resources allocated to the application */ synchronized private void allocateOffSwitch(SchedulerNode node, Priority priority, ResourceRequest offSwitchRequest, Container container, List<ResourceRequest> resourceRequests) { // Update future requirements decrementOutstanding(offSwitchRequest); // Update cloned OffRack requests for recovery resourceRequests.add(cloneResourceRequest(offSwitchRequest)); } synchronized private void decrementOutstanding( ResourceRequest offSwitchRequest) { int numOffSwitchContainers = offSwitchRequest.getNumContainers() - 1; // Do not remove ANY offSwitchRequest.setNumContainers(numOffSwitchContainers); // Do we have any outstanding requests? 
// If there is nothing, we need to deactivate this application if (numOffSwitchContainers == 0) { checkForDeactivation(); } } synchronized private void checkForDeactivation() { boolean deactivate = true; for (Priority priority : getPriorities()) { ResourceRequest request = getResourceRequest(priority, ResourceRequest.ANY); if (request.getNumContainers() > 0) { deactivate = false; break; } } if (deactivate) { activeUsersManager.deactivateApplication(user, applicationId); } } synchronized public void move(Queue newQueue) { QueueMetrics oldMetrics = queue.getMetrics(); QueueMetrics newMetrics = newQueue.getMetrics(); for (Map<String, ResourceRequest> asks : requests.values()) { ResourceRequest request = asks.get(ResourceRequest.ANY); if (request != null) { oldMetrics.decrPendingResources(user, request.getNumContainers(), request.getCapability()); newMetrics.incrPendingResources(user, request.getNumContainers(), request.getCapability()); } } oldMetrics.moveAppFrom(this); newMetrics.moveAppTo(this); activeUsersManager.deactivateApplication(user, applicationId); activeUsersManager = newQueue.getActiveUsersManager(); activeUsersManager.activateApplication(user, applicationId); this.queue = newQueue; this.queueName = newQueue.getQueueName(); } synchronized public void stop(RMAppAttemptState rmAppAttemptFinalState) { // clear pending resources metrics for the application QueueMetrics metrics = queue.getMetrics(); for (Map<String, ResourceRequest> asks : requests.values()) { ResourceRequest request = asks.get(ResourceRequest.ANY); if (request != null) { metrics.decrPendingResources(user, request.getNumContainers(), request.getCapability()); } } metrics.finishAppAttempt(applicationId, pending, user); // Clear requests themselves clearRequests(); } public synchronized void setQueue(Queue queue) { this.queue = queue; } public synchronized Set<String> getBlackList() { return this.blacklist; } public synchronized void transferStateFromPreviousAppSchedulingInfo( AppSchedulingInfo 
appInfo) { // this.priorities = appInfo.getPriorities(); // this.requests = appInfo.getRequests(); this.blacklist = appInfo.getBlackList(); } public synchronized void recoverContainer(RMContainer rmContainer) { QueueMetrics metrics = queue.getMetrics(); if (pending) { // If there was any container to recover, the application was // running from scheduler's POV. pending = false; metrics.runAppAttempt(applicationId, user); } // Container is completed. Skip recovering resources. if (rmContainer.getState().equals(RMContainerState.COMPLETED)) { return; } metrics.allocateResources(user, 1, rmContainer.getAllocatedResource(), false); } public ResourceRequest cloneResourceRequest(ResourceRequest request) { ResourceRequest newRequest = ResourceRequest.newInstance( request.getPriority(), request.getResourceName(), request.getCapability(), 1, request.getRelaxLocality()); return newRequest; } }
apache-2.0
DCSaunders/tensorflow
tensorflow/core/kernels/reverse_op.cc
11332
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ // See docs in ../ops/array_ops.cc #define EIGEN_USE_THREADS #include "tensorflow/core/kernels/reverse_op.h" #include <memory> #include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" #include "tensorflow/core/framework/op_kernel.h" #include "tensorflow/core/framework/register_types.h" #include "tensorflow/core/framework/tensor.h" #include "tensorflow/core/framework/tensor_shape.h" #include "tensorflow/core/framework/types.h" #include "tensorflow/core/kernels/bounds_check.h" #include "tensorflow/core/lib/core/status.h" #include "tensorflow/core/platform/logging.h" namespace tensorflow { typedef Eigen::ThreadPoolDevice CPUDevice; typedef Eigen::GpuDevice GPUDevice; template <typename Device, typename T, int NDIMS> void HandleReverseCase(OpKernelContext* context, typename TTypes<bool, 1>::ConstTensor dims, Tensor* result) { typename Eigen::array<bool, NDIMS> axes_di; for (int i = 0; i < NDIMS; i++) { axes_di[i] = dims(i); } functor::Reverse<Device, T, NDIMS>()(context->eigen_device<Device>(), context->input(0).tensor<T, NDIMS>(), axes_di, result->tensor<T, NDIMS>()); } template <typename Device, typename T> class ReverseOp : public OpKernel { public: explicit ReverseOp(OpKernelConstruction* context) : OpKernel(context) {} void Compute(OpKernelContext* context) override { const Tensor& input = context->input(0); 
const Tensor& dims = context->input(1); if (TensorShapeUtils::IsScalar(input.shape())) { Tensor* output = nullptr; OP_REQUIRES_OK(context, context->allocate_output(0, input.shape(), &output)); output->scalar<T>() = input.scalar<T>(); } else { const int input_dims = input.dims(); OP_REQUIRES(context, TensorShapeUtils::IsVector(dims.shape()), errors::InvalidArgument("'dims' must be 1-dimension, not ", dims.dims())); OP_REQUIRES( context, input_dims == dims.dim_size(0), errors::InvalidArgument( "'dims' must have the same number of values as 'input' has " "dimensions. 'input' has ", input_dims, "'dims' has ", dims.dim_size(0), " values")); OP_REQUIRES(context, input_dims <= 8, errors::Unimplemented( "reverse is not implemented for tensors of rank > 8.")); Tensor* output = nullptr; OP_REQUIRES_OK(context, context->allocate_output(0, input.shape(), &output)); #define HANDLE_REVERSE(NDIMS) \ case NDIMS: \ HandleReverseCase<Device, T, NDIMS>(context, dims.vec<bool>(), output); \ return; switch (input_dims) { HANDLE_REVERSE(0); HANDLE_REVERSE(1); HANDLE_REVERSE(2); HANDLE_REVERSE(3); HANDLE_REVERSE(4); HANDLE_REVERSE(5); HANDLE_REVERSE(6); HANDLE_REVERSE(7); HANDLE_REVERSE(8); } #undef HANDLE_REVERSE } } }; template <typename Device, typename T, int NDIMS> void HandleReverseV2Case(OpKernelContext* context, const gtl::ArraySlice<bool>& axes, Tensor* result) { typename Eigen::array<bool, NDIMS> axes_di; for (int i = 0; i < NDIMS; i++) { axes_di[i] = axes[i]; } functor::Reverse<Device, T, NDIMS>()(context->eigen_device<Device>(), context->input(0).tensor<T, NDIMS>(), axes_di, result->tensor<T, NDIMS>()); } template <typename Device, typename T> class ReverseV2Op : public OpKernel { public: explicit ReverseV2Op(OpKernelConstruction* context) : OpKernel(context) {} void Compute(OpKernelContext* context) override { const Tensor& input = context->input(0); const Tensor& sparse_dims = context->input(1); if (TensorShapeUtils::IsScalar(input.shape())) { Tensor* output = nullptr; 
OP_REQUIRES_OK(context, context->allocate_output(0, input.shape(), &output)); output->scalar<T>() = input.scalar<T>(); } else { const int input_dims = input.dims(); const TensorShape& sparse_dims_shape = sparse_dims.shape(); const auto& axes_sparse_flat = sparse_dims.flat<int32>(); OP_REQUIRES(context, TensorShapeUtils::IsVector(sparse_dims_shape), errors::InvalidArgument("'dims' must be 1-dimension, not ", sparse_dims.dims())); gtl::InlinedVector<bool, 8> axes_dense(input_dims, false); for (int dummy = 0; dummy < axes_sparse_flat.size(); dummy++) { int32 axis = internal::SubtleMustCopy<int32>(axes_sparse_flat(dummy)); int32 canonical_axis = axis < 0 ? input_dims + axis : axis; OP_REQUIRES(context, canonical_axis >= 0 && canonical_axis < input_dims, errors::InvalidArgument("'axis'[", dummy, "] = ", axis, " is out of valid range [", 0, ", ", input_dims - 1)); OP_REQUIRES(context, !axes_dense[canonical_axis], errors::InvalidArgument("axis ", canonical_axis, " specified more than once.")); axes_dense[canonical_axis] = true; } OP_REQUIRES(context, input_dims <= 8, errors::Unimplemented( "reverse is not implemented for tensors of rank > 8.")); Tensor* output = nullptr; OP_REQUIRES_OK(context, context->allocate_output(0, input.shape(), &output)); #define HANDLE_REVERSE(NDIMS) \ case NDIMS: \ HandleReverseV2Case<Device, T, NDIMS>(context, axes_dense, output); \ return; switch (input_dims) { HANDLE_REVERSE(0); HANDLE_REVERSE(1); HANDLE_REVERSE(2); HANDLE_REVERSE(3); HANDLE_REVERSE(4); HANDLE_REVERSE(5); HANDLE_REVERSE(6); HANDLE_REVERSE(7); HANDLE_REVERSE(8); } #undef HANDLE_REVERSE } } }; #define REGISTER_KERNELS(T) \ REGISTER_KERNEL_BUILDER(Name("Reverse") \ .Device(DEVICE_CPU) \ .TypeConstraint<T>("T") \ .HostMemory("dims"), \ ReverseOp<CPUDevice, T>) \ REGISTER_KERNEL_BUILDER(Name("ReverseV2") \ .Device(DEVICE_CPU) \ .TypeConstraint<T>("T") \ .TypeConstraint<int32>("Tidx") \ .HostMemory("axis"), \ ReverseV2Op<CPUDevice, T>) TF_CALL_POD_TYPES(REGISTER_KERNELS); #undef 
REGISTER_KERNELS #if GOOGLE_CUDA // Forward declarations of the function specializations for GPU (to prevent // building the GPU versions here, they will be built compiling _gpu.cu.cc). namespace functor { #define DECLARE_GPU_SPEC_DIM(T, DIM) \ template <> \ void Reverse<GPUDevice, T, DIM>::operator()( \ const GPUDevice& d, typename TTypes<T, DIM>::ConstTensor input, \ const Eigen::array<bool, DIM>& reverse_dims, \ typename TTypes<T, DIM>::Tensor output); \ extern template struct Reverse<GPUDevice, T, DIM>; #define DECLARE_GPU_SPEC(T) \ DECLARE_GPU_SPEC_DIM(T, 0) \ DECLARE_GPU_SPEC_DIM(T, 1) \ DECLARE_GPU_SPEC_DIM(T, 2) \ DECLARE_GPU_SPEC_DIM(T, 3) \ DECLARE_GPU_SPEC_DIM(T, 4) \ DECLARE_GPU_SPEC_DIM(T, 5) \ DECLARE_GPU_SPEC_DIM(T, 6) \ DECLARE_GPU_SPEC_DIM(T, 7) \ DECLARE_GPU_SPEC_DIM(T, 8) TF_CALL_uint8(DECLARE_GPU_SPEC); TF_CALL_int8(DECLARE_GPU_SPEC); TF_CALL_bool(DECLARE_GPU_SPEC); TF_CALL_half(DECLARE_GPU_SPEC); TF_CALL_float(DECLARE_GPU_SPEC); TF_CALL_double(DECLARE_GPU_SPEC); TF_CALL_complex64(DECLARE_GPU_SPEC); TF_CALL_complex128(DECLARE_GPU_SPEC); #undef DECLARE_GPU_SPEC #undef DECLARE_GPU_SPEC_DIM } // namespace functor // Registration of the GPU implementations. #define REGISTER_GPU_KERNELS(T) \ REGISTER_KERNEL_BUILDER(Name("Reverse") \ .Device(DEVICE_GPU) \ .TypeConstraint<T>("T") \ .HostMemory("dims"), \ ReverseOp<GPUDevice, T>) \ REGISTER_KERNEL_BUILDER(Name("ReverseV2") \ .Device(DEVICE_GPU) \ .TypeConstraint<T>("T") \ .TypeConstraint<int32>("Tidx") \ .HostMemory("axis"), \ ReverseV2Op<GPUDevice, T>) TF_CALL_uint8(REGISTER_GPU_KERNELS); TF_CALL_int8(REGISTER_GPU_KERNELS); // TODO decide whether we want to enable the bool kernel. // TF_CALL_bool(REGISTER_GPU_KERNELS); TF_CALL_half(REGISTER_GPU_KERNELS); TF_CALL_float(REGISTER_GPU_KERNELS); TF_CALL_double(REGISTER_GPU_KERNELS); TF_CALL_complex64(REGISTER_GPU_KERNELS); TF_CALL_complex128(REGISTER_GPU_KERNELS); #undef REGISTER_GPU_KERNEL // A special GPU kernel for int32. 
// TODO(b/25387198): Also enable int32 in device memory. This kernel // registration requires all int32 inputs and outputs to be in host memory. REGISTER_KERNEL_BUILDER(Name("Reverse") .Device(DEVICE_GPU) .TypeConstraint<int32>("T") .HostMemory("tensor") .HostMemory("dims") .HostMemory("output"), ReverseOp<CPUDevice, int32>); REGISTER_KERNEL_BUILDER(Name("ReverseV2") .Device(DEVICE_GPU) .TypeConstraint<int32>("T") .TypeConstraint<int32>("Tidx") .HostMemory("tensor") .HostMemory("axis") .HostMemory("output"), ReverseV2Op<CPUDevice, int32>); #endif // GOOGLE_CUDA } // namespace tensorflow
apache-2.0
paveldk/nativescript-bootstrap
tns_modules/ui/button/button.ios.js
1838
var __extends = this.__extends || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; function __() { this.constructor = d; } __.prototype = b.prototype; d.prototype = new __(); }; var common = require("ui/button/button-common"); var stateChanged = require("ui/core/control-state-change"); var TapHandlerImpl = (function (_super) { __extends(TapHandlerImpl, _super); function TapHandlerImpl() { _super.apply(this, arguments); } TapHandlerImpl.new = function () { return _super.new.call(this); }; TapHandlerImpl.prototype.initWithOwner = function (owner) { this._owner = owner; return this; }; TapHandlerImpl.prototype.tap = function (args) { this._owner._emit(common.knownEvents.tap); }; TapHandlerImpl.ObjCExposedMethods = { "tap": { returns: interop.types.void, params: [interop.types.id] } }; return TapHandlerImpl; })(NSObject); require("utils/module-merge").merge(common, exports); var Button = (function (_super) { __extends(Button, _super); function Button() { var _this = this; _super.call(this); this._ios = UIButton.buttonWithType(UIButtonType.UIButtonTypeSystem); this._tapHandler = TapHandlerImpl.new().initWithOwner(this); this._ios.addTargetActionForControlEvents(this._tapHandler, "tap", UIControlEvents.UIControlEventTouchUpInside); this._stateChangedHandler = new stateChanged.ControlStateChangeListener(this._ios, function (s) { _this._goToVisualState(s); }); } Object.defineProperty(Button.prototype, "ios", { get: function () { return this._ios; }, enumerable: true, configurable: true }); return Button; })(common.Button); exports.Button = Button;
bsd-2-clause
endlessm/chromium-browser
third_party/llvm/compiler-rt/test/asan/TestCases/Windows/dll_noreturn.cpp
891
// RUN: %clang_cl_asan -Od %p/dll_host.cpp -Fe%t // RUN: %clang_cl_asan -LD -Od %s -Fe%t.dll // RUN: not %run %t %t.dll 2>&1 | FileCheck %s #include <process.h> void noreturn_f() { int subscript = -1; char buffer[42]; buffer[subscript] = 42; _exit(1); // CHECK: AddressSanitizer: stack-buffer-underflow on address [[ADDR:0x[0-9a-f]+]] // CHECK: WRITE of size 1 at [[ADDR]] thread T0 // CHECK-NEXT: noreturn_f{{.*}}dll_noreturn.cpp:[[@LINE-4]] // CHECK-NEXT: test_function{{.*}}dll_noreturn.cpp // CHECK-NEXT: main{{.*}}dll_host.cpp // // CHECK: Address [[ADDR]] is located in stack of thread T0 at offset [[OFFSET:.*]] in frame // CHECK-NEXT: noreturn_f{{.*}}dll_noreturn.cpp // CHECK: 'buffer'{{.*}} <== Memory access at offset [[OFFSET]] underflows this variable // CHECK-LABEL: SUMMARY } extern "C" __declspec(dllexport) int test_function() { noreturn_f(); return 0; }
bsd-3-clause
korealerts1/sentry
src/sentry/exceptions.py
445
from __future__ import absolute_import


class InvalidData(Exception):
    """Base error for event payload data Sentry rejects."""


class InvalidInterface(InvalidData):
    """Subtype of :class:`InvalidData` for interface-level problems."""


class InvalidRequest(Exception):
    """Base error for rejected client requests."""


class InvalidOrigin(InvalidRequest):
    """Request came from a disallowed origin.

    Stores the rejected origin and renders as ``Invalid origin: '<origin>'``.
    """

    def __init__(self, origin):
        self.origin = origin

    def __str__(self):
        return "Invalid origin: '%s'" % self.origin


class CacheNotPopulated(Exception):
    """Raised when a required cache has not been populated yet."""


class InvalidConfiguration(Exception):
    """Raised for invalid configuration values."""
bsd-3-clause
mediathread/mdtprint
app/bower_components/phantom/src/qt/src/gui/kernel/qdesktopwidget_s60.cpp
9072
/**************************************************************************** ** ** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies). ** Contact: http://www.qt-project.org/legal ** ** This file is part of the QtGui module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL$ ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and Digia. For licensing terms and ** conditions see http://qt.digia.com/licensing. For further information ** use the contact form at http://qt.digia.com/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 as published by the Free Software ** Foundation and appearing in the file LICENSE.LGPL included in the ** packaging of this file. Please review the following information to ** ensure the GNU Lesser General Public License version 2.1 requirements ** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** In addition, as a special exception, Digia gives you certain additional ** rights. These rights are described in the Digia Qt LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. ** ** GNU General Public License Usage ** Alternatively, this file may be used under the terms of the GNU ** General Public License version 3.0 as published by the Free Software ** Foundation and appearing in the file LICENSE.GPL included in the ** packaging of this file. Please review the following information to ** ensure the GNU General Public License version 3.0 requirements will be ** met: http://www.gnu.org/copyleft/gpl.html. 
** ** ** $QT_END_LICENSE$ ** ****************************************************************************/ #include "qdesktopwidget.h" #include "qapplication_p.h" #include "qwidget_p.h" #include "qt_s60_p.h" #include <w32std.h> #if defined(Q_SYMBIAN_SUPPORTS_MULTIPLE_SCREENS) #include <graphics/displaycontrol.h> #endif QT_BEGIN_NAMESPACE extern int qt_symbian_create_desktop_on_screen; class QSingleDesktopWidget : public QWidget { public: QSingleDesktopWidget(); ~QSingleDesktopWidget(); }; QSingleDesktopWidget::QSingleDesktopWidget() : QWidget(0, Qt::Desktop) { } QSingleDesktopWidget::~QSingleDesktopWidget() { const QObjectList &childList = children(); for (int i = childList.size(); i > 0 ;) { --i; childList.at(i)->setParent(0); } } class QDesktopWidgetPrivate : public QWidgetPrivate { public: QDesktopWidgetPrivate(); ~QDesktopWidgetPrivate(); static void init(QDesktopWidget *that); static void cleanup(); static void init_sys(); static int screenCount; static int primaryScreen; static QVector<QRect> *rects; static QVector<QRect> *workrects; static QVector<QWidget *> *screens; static int refcount; #if defined(Q_SYMBIAN_SUPPORTS_MULTIPLE_SCREENS) static MDisplayControl *displayControl; #endif }; int QDesktopWidgetPrivate::screenCount = 1; int QDesktopWidgetPrivate::primaryScreen = 0; QVector<QRect> *QDesktopWidgetPrivate::rects = 0; QVector<QRect> *QDesktopWidgetPrivate::workrects = 0; QVector<QWidget *> *QDesktopWidgetPrivate::screens = 0; int QDesktopWidgetPrivate::refcount = 0; #if defined(Q_SYMBIAN_SUPPORTS_MULTIPLE_SCREENS) MDisplayControl *QDesktopWidgetPrivate::displayControl = 0; #endif QDesktopWidgetPrivate::QDesktopWidgetPrivate() { ++refcount; } QDesktopWidgetPrivate::~QDesktopWidgetPrivate() { if (!--refcount) cleanup(); } void QDesktopWidgetPrivate::init(QDesktopWidget *that) { // Note that on S^3 devices the screen count retrieved via RWsSession // will always be 2 but the width and height for screen number 1 will // be 0 as long as TV-out is not 
connected. // // On the other hand a valid size for screen 1 will be reported even // after the cable is disconnected. In order to overcome this, we use // MDisplayControl::NumberOfResolutions() to check if the display is // valid or not. screenCount = S60->screenCount(); #if defined(Q_SYMBIAN_SUPPORTS_MULTIPLE_SCREENS) if (displayControl) { if (displayControl->NumberOfResolutions() < 1) screenCount = 1; } #endif if (screenCount < 1) { qWarning("No screen available"); screenCount = 1; } rects = new QVector<QRect>(); workrects = new QVector<QRect>(); screens = new QVector<QWidget *>(); rects->resize(screenCount); workrects->resize(screenCount); screens->resize(screenCount); for (int i = 0; i < screenCount; ++i) { // All screens will have a position of (0, 0) as there is no true virtual desktop // or pointer event support for multiple screens on Symbian. QRect r(0, 0, S60->screenWidthInPixelsForScreen[i], S60->screenHeightInPixelsForScreen[i]); // Stop here if empty and ignore this screen. if (r.isEmpty()) { screenCount = i; break; } (*rects)[i] = r; QRect wr; if (i == 0) wr = qt_TRect2QRect(S60->clientRect()); else wr = rects->at(i); (*workrects)[i].setRect(wr.x(), wr.y(), wr.width(), wr.height()); (*screens)[i] = 0; } (*screens)[0] = that; } void QDesktopWidgetPrivate::cleanup() { delete rects; rects = 0; delete workrects; workrects = 0; if (screens) { // First item is the QDesktopWidget so skip it. 
for (int i = 1; i < screens->count(); ++i) delete screens->at(i); } delete screens; screens = 0; } void QDesktopWidgetPrivate::init_sys() { #if defined(Q_SYMBIAN_SUPPORTS_MULTIPLE_SCREENS) if (S60->screenCount() > 1) { CWsScreenDevice *dev = S60->screenDevice(1); if (dev) { displayControl = static_cast<MDisplayControl *>( dev->GetInterface(MDisplayControl::ETypeId)); if (displayControl) { displayControl->EnableDisplayChangeEvents(ETrue); } } } #endif } QDesktopWidget::QDesktopWidget() : QWidget(*new QDesktopWidgetPrivate, 0, Qt::Desktop) { setObjectName(QLatin1String("desktop")); QDesktopWidgetPrivate::init_sys(); QDesktopWidgetPrivate::init(this); } QDesktopWidget::~QDesktopWidget() { } bool QDesktopWidget::isVirtualDesktop() const { return false; } int QDesktopWidget::primaryScreen() const { return QDesktopWidgetPrivate::primaryScreen; } int QDesktopWidget::numScreens() const { Q_D(const QDesktopWidget); return QDesktopWidgetPrivate::screenCount; } static inline QWidget *newSingleDesktopWidget(int screen) { qt_symbian_create_desktop_on_screen = screen; QWidget *w = new QSingleDesktopWidget; qt_symbian_create_desktop_on_screen = -1; return w; } QWidget *QDesktopWidget::screen(int screen) { Q_D(QDesktopWidget); if (screen < 0 || screen >= d->screenCount) screen = d->primaryScreen; if (!d->screens->at(screen) || d->screens->at(screen)->windowType() != Qt::Desktop) (*d->screens)[screen] = newSingleDesktopWidget(screen); return (*d->screens)[screen]; } const QRect QDesktopWidget::availableGeometry(int screen) const { Q_D(const QDesktopWidget); if (screen < 0 || screen >= d->screenCount) screen = d->primaryScreen; return d->workrects->at(screen); } const QRect QDesktopWidget::screenGeometry(int screen) const { Q_D(const QDesktopWidget); if (screen < 0 || screen >= d->screenCount) screen = d->primaryScreen; return d->rects->at(screen); } int QDesktopWidget::screenNumber(const QWidget *widget) const { Q_D(const QDesktopWidget); return widget ? 
S60->screenNumberForWidget(widget) : d->primaryScreen; } int QDesktopWidget::screenNumber(const QPoint &point) const { Q_UNUSED(point); Q_D(const QDesktopWidget); return d->primaryScreen; } void QDesktopWidget::resizeEvent(QResizeEvent *) { Q_D(QDesktopWidget); QVector<QRect> oldrects; oldrects = *d->rects; QVector<QRect> oldworkrects; oldworkrects = *d->workrects; int oldscreencount = d->screenCount; QDesktopWidgetPrivate::cleanup(); QDesktopWidgetPrivate::init(this); for (int i = 0; i < qMin(oldscreencount, d->screenCount); ++i) { QRect oldrect = oldrects[i]; QRect newrect = d->rects->at(i); if (oldrect != newrect) emit resized(i); } for (int j = 0; j < qMin(oldscreencount, d->screenCount); ++j) { QRect oldrect = oldworkrects[j]; QRect newrect = d->workrects->at(j); if (oldrect != newrect) emit workAreaResized(j); } if (oldscreencount != d->screenCount) { emit screenCountChanged(d->screenCount); } } QT_END_NAMESPACE
mit
ralzate/Produccion
vendor/bundle/ruby/2.2.0/gems/fog-storm_on_demand-0.1.1/lib/fog/support/storm_on_demand/requests/add_transaction_feedback.rb
312
module Fog
  module Support
    class StormOnDemand
      class Real
        # POST feedback for a support-ticket transaction.
        # +options+ is forwarded verbatim as the :params payload of the
        # JSON-encoded request body.
        def add_transaction_feedback(options = {})
          payload = Fog::JSON.encode(:params => options)
          request(
            :path => "/Support/Ticket/addTransactionFeedback",
            :body => payload
          )
        end
      end
    end
  end
end
mit
Kotpes/kateoleshova
web/app/plugins/advanced-custom-fields-pro/core/field.php
8707
<?php if( ! class_exists('acf_field') ) : class acf_field { // vars var $name = '', $label = '', $category = 'basic', $defaults = array(), $l10n = array(), $public = true; /* * __construct * * This construcor registeres many actions and filters * * @type function * @date 5/03/2014 * @since 5.0.0 * * @param n/a * @return n/a */ function __construct() { // info $this->add_filter('acf/get_field_types', array($this, 'get_field_types'), 10, 1); // value $this->add_field_filter('acf/load_value', array($this, 'load_value'), 10, 3); $this->add_field_filter('acf/update_value', array($this, 'update_value'), 10, 3); $this->add_field_filter('acf/format_value', array($this, 'format_value'), 10, 3); $this->add_field_filter('acf/validate_value', array($this, 'validate_value'), 10, 4); $this->add_field_action('acf/delete_value', array($this, 'delete_value'), 10, 3); // field $this->add_field_filter('acf/validate_field', array($this, 'validate_field'), 10, 1); $this->add_field_filter('acf/load_field', array($this, 'load_field'), 10, 1); $this->add_field_filter('acf/update_field', array($this, 'update_field'), 10, 1); $this->add_field_filter('acf/duplicate_field', array($this, 'duplicate_field'), 10, 1); $this->add_field_action('acf/delete_field', array($this, 'delete_field'), 10, 1); $this->add_field_action('acf/render_field', array($this, 'render_field'), 9, 1); $this->add_field_action('acf/render_field_settings', array($this, 'render_field_settings'), 9, 1); $this->add_field_filter('acf/prepare_field', array($this, 'prepare_field'), 10, 1); $this->add_field_filter('acf/translate_field', array($this, 'translate_field'), 10, 1); // input actions $this->add_action("acf/input/admin_enqueue_scripts", array($this, 'input_admin_enqueue_scripts'), 10, 0); $this->add_action("acf/input/admin_head", array($this, 'input_admin_head'), 10, 0); $this->add_action("acf/input/form_data", array($this, 'input_form_data'), 10, 1); $this->add_filter("acf/input/admin_l10n", array($this, 
'input_admin_l10n'), 10, 1); $this->add_action("acf/input/admin_footer", array($this, 'input_admin_footer'), 10, 1); // field group actions $this->add_action("acf/field_group/admin_enqueue_scripts", array($this, 'field_group_admin_enqueue_scripts'), 10, 0); $this->add_action("acf/field_group/admin_head", array($this, 'field_group_admin_head'), 10, 0); $this->add_action("acf/field_group/admin_footer", array($this, 'field_group_admin_footer'), 10, 0); } /* * add_filter * * This function checks if the function is_callable before adding the filter * * @type function * @date 5/03/2014 * @since 5.0.0 * * @param $tag (string) * @param $function_to_add (string) * @param $priority (int) * @param $accepted_args (int) * @return n/a */ function add_filter( $tag = '', $function_to_add = '', $priority = 10, $accepted_args = 1 ) { // bail early if no callable if( !is_callable($function_to_add) ) return; // add add_filter( $tag, $function_to_add, $priority, $accepted_args ); } /* * add_field_filter * * This function will add a field type specific filter * * @type function * @date 29/09/2016 * @since 5.4.0 * * @param $tag (string) * @param $function_to_add (string) * @param $priority (int) * @param $accepted_args (int) * @return n/a */ function add_field_filter( $tag = '', $function_to_add = '', $priority = 10, $accepted_args = 1 ) { // append $tag .= '/type=' . 
$this->name; // add $this->add_filter( $tag, $function_to_add, $priority, $accepted_args ); } /* * add_action * * This function checks if the function is_callable before adding the action * * @type function * @date 5/03/2014 * @since 5.0.0 * * @param $tag (string) * @param $function_to_add (string) * @param $priority (int) * @param $accepted_args (int) * @return n/a */ function add_action( $tag = '', $function_to_add = '', $priority = 10, $accepted_args = 1 ) { // bail early if no callable if( !is_callable($function_to_add) ) return; // add add_action( $tag, $function_to_add, $priority, $accepted_args ); } /* * add_field_action * * This function will add a field type specific filter * * @type function * @date 29/09/2016 * @since 5.4.0 * * @param $tag (string) * @param $function_to_add (string) * @param $priority (int) * @param $accepted_args (int) * @return n/a */ function add_field_action( $tag = '', $function_to_add = '', $priority = 10, $accepted_args = 1 ) { // append $tag .= '/type=' . 
$this->name; // add $this->add_action( $tag, $function_to_add, $priority, $accepted_args ); } /* * get_field_types() * * This function will append the current field type to the list of available field types * * @type function * @since 3.6 * @date 23/01/13 * * @param $fields (array) * @return $fields */ function get_field_types( $types ) { // append $types[ $this->name ] = array( 'label' => $this->label, 'name' => $this->name, 'category' => $this->category, 'public' => $this->public ); // return return $types; } /* * validate_field * * This function will append default settings to a field * * @type filter ("acf/validate_field/type={$this->name}") * @since 3.6 * @date 23/01/13 * * @param $field (array) * @return $field (array) */ function validate_field( $field ) { // bail early if no defaults if( !is_array($this->defaults) ) return $field; // merge in defaults but keep order of $field keys foreach( $this->defaults as $k => $v ) { if( !isset($field[ $k ]) ) $field[ $k ] = $v; } // return return $field; } /* * admin_l10n * * This function will append l10n text translations to an array which is later passed to JS * * @type filter ("acf/input/admin_l10n") * @since 3.6 * @date 23/01/13 * * @param $l10n (array) * @return $l10n (array) */ function input_admin_l10n( $l10n ) { // bail early if no defaults if( empty($this->l10n) ) return $l10n; // append $l10n[ $this->name ] = $this->l10n; // return return $l10n; } } endif; // class_exists check /* * acf_get_field_types * * This function will return an array containing info about all field types * * @type function * @date 22/10/16 * @since 5.5.0 * * @param n/a * @return (array) */ function acf_get_field_types() { // vars $cache_key = 'acf_get_field_types'; // check cache if( acf_isset_cache($cache_key) ) return acf_get_cache($cache_key); // get types $types = apply_filters('acf/get_field_types', array()); // update cache acf_set_cache($cache_key, $types); // return return $types; } /* * acf_get_grouped_field_types * * This 
function will return a grouped array of fields types (category => name) * * @type function * @date 1/10/13 * @since 5.0.0 * * @param n/a * @return (array) */ function acf_get_grouped_field_types() { // vars $types = array(); $l10n = array( 'basic' => __('Basic', 'acf'), 'content' => __('Content', 'acf'), 'choice' => __('Choice', 'acf'), 'relational' => __('Relational', 'acf'), 'jquery' => __('jQuery', 'acf'), 'layout' => __('Layout', 'acf'), ); // get field type information $types_info = acf_get_field_types(); // loop foreach( $types_info as $info ) { // bail early if not public if( !$info['public'] ) continue; // vars $cat = $info['category']; // default to basic if( !$cat ) $cat = 'basic'; // translate $cat = isset($l10n[ $cat ]) ? $l10n[ $cat ] : $cat; // append $types[ $cat ][ $info['name'] ] = $info['label']; } // return return $types; } /* * acf_get_field_type_label * * This function will return the label of a field type * * @type function * @date 1/10/13 * @since 5.0.0 * * @param n/a * @return (array) */ function acf_get_field_type_label( $type = '' ) { // vars $types = acf_get_field_types(); // bail early if doesn't exist if( !isset($types[ $type ]) ) return ''; // return return $types[ $type ]['label']; } /* * acf_field_type_exists * * This function will check if the field_type exists * * @type function * @date 1/10/13 * @since 5.0.0 * * @param $type (string) * @return (boolean) */ function acf_field_type_exists( $type = '' ) { // vars $types = acf_get_field_types(); // return return isset($types[ $type ]); } ?>
mit
fatihwk/Rocket.Chat
packages/rocketchat-lib/server/methods/saveSetting.js
969
/* eslint new-cap: 0 */ Meteor.methods({ saveSetting(_id, value, editor) { if (Meteor.userId() === null) { throw new Meteor.Error('error-action-not-allowed', 'Editing settings is not allowed', { method: 'saveSetting' }); } if (!RocketChat.authz.hasPermission(Meteor.userId(), 'edit-privileged-setting')) { throw new Meteor.Error('error-action-not-allowed', 'Editing settings is not allowed', { method: 'saveSetting' }); } //Verify the _id passed in is a string. check(_id, String); const setting = RocketChat.models.Settings.db.findOneById(_id); //Verify the value is what it should be switch (setting.type) { case 'roomPick': check(value, Match.OneOf([Object], '')); break; case 'boolean': check(value, Boolean); break; case 'int': check(value, Number); break; default: check(value, String); break; } RocketChat.settings.updateById(_id, value, editor); return true; } });
mit
JayCanuck/enyo-2-components
bookmarklet/bookmarklet-bootplate/enyo/source/package.js
61
enyo.depends( "kernel", "ajax", "dom", "touch", "ui" );
mit
subhasisghosal/NDL_ErrorLogAnalysisPortal
LogAnalysisServer/node_modules/mongoose/lib/aggregate.js
21647
/*! * Module dependencies */ var AggregationCursor = require('./cursor/AggregationCursor'); var PromiseProvider = require('./promise_provider'); var Query = require('./query'); var eachAsync = require('./services/cursor/eachAsync'); var util = require('util'); var utils = require('./utils'); var read = Query.prototype.read; /** * Aggregate constructor used for building aggregation pipelines. * * ####Example: * * new Aggregate(); * new Aggregate({ $project: { a: 1, b: 1 } }); * new Aggregate({ $project: { a: 1, b: 1 } }, { $skip: 5 }); * new Aggregate([{ $project: { a: 1, b: 1 } }, { $skip: 5 }]); * * Returned when calling Model.aggregate(). * * ####Example: * * Model * .aggregate({ $match: { age: { $gte: 21 }}}) * .unwind('tags') * .exec(callback) * * ####Note: * * - The documents returned are plain javascript objects, not mongoose documents (since any shape of document can be returned). * - Requires MongoDB >= 2.1 * - Mongoose does **not** cast pipeline stages. `new Aggregate({ $match: { _id: '00000000000000000000000a' } });` will not work unless `_id` is a string in the database. Use `new Aggregate({ $match: { _id: mongoose.Types.ObjectId('00000000000000000000000a') } });` instead. * * @see MongoDB http://docs.mongodb.org/manual/applications/aggregation/ * @see driver http://mongodb.github.com/node-mongodb-native/api-generated/collection.html#aggregate * @param {Object|Array} [ops] aggregation operator(s) or operator array * @api public */ function Aggregate() { this._pipeline = []; this._model = undefined; this.options = undefined; if (arguments.length === 1 && util.isArray(arguments[0])) { this.append.apply(this, arguments[0]); } else { this.append.apply(this, arguments); } } /** * Binds this aggregate to a model. 
* * @param {Model} model the model to which the aggregate is to be bound * @return {Aggregate} * @api public */ Aggregate.prototype.model = function(model) { this._model = model; return this; }; /** * Appends new operators to this aggregate pipeline * * ####Examples: * * aggregate.append({ $project: { field: 1 }}, { $limit: 2 }); * * // or pass an array * var pipeline = [{ $match: { daw: 'Logic Audio X' }} ]; * aggregate.append(pipeline); * * @param {Object} ops operator(s) to append * @return {Aggregate} * @api public */ Aggregate.prototype.append = function() { var args = (arguments.length === 1 && util.isArray(arguments[0])) ? arguments[0] : utils.args(arguments); if (!args.every(isOperator)) { throw new Error('Arguments must be aggregate pipeline operators'); } this._pipeline = this._pipeline.concat(args); return this; }; /** * Appends a new $project operator to this aggregate pipeline. * * Mongoose query [selection syntax](#query_Query-select) is also supported. * * ####Examples: * * // include a, include b, exclude _id * aggregate.project("a b -_id"); * * // or you may use object notation, useful when * // you have keys already prefixed with a "-" * aggregate.project({a: 1, b: 1, _id: 0}); * * // reshaping documents * aggregate.project({ * newField: '$b.nested' * , plusTen: { $add: ['$val', 10]} * , sub: { * name: '$a' * } * }) * * // etc * aggregate.project({ salary_k: { $divide: [ "$salary", 1000 ] } }); * * @param {Object|String} arg field specification * @see projection http://docs.mongodb.org/manual/reference/aggregation/project/ * @return {Aggregate} * @api public */ Aggregate.prototype.project = function(arg) { var fields = {}; if (typeof arg === 'object' && !util.isArray(arg)) { Object.keys(arg).forEach(function(field) { fields[field] = arg[field]; }); } else if (arguments.length === 1 && typeof arg === 'string') { arg.split(/\s+/).forEach(function(field) { if (!field) { return; } var include = field[0] === '-' ? 
0 : 1; if (include === 0) { field = field.substring(1); } fields[field] = include; }); } else { throw new Error('Invalid project() argument. Must be string or object'); } return this.append({$project: fields}); }; /** * Appends a new custom $group operator to this aggregate pipeline. * * ####Examples: * * aggregate.group({ _id: "$department" }); * * @see $group http://docs.mongodb.org/manual/reference/aggregation/group/ * @method group * @memberOf Aggregate * @param {Object} arg $group operator contents * @return {Aggregate} * @api public */ /** * Appends a new custom $match operator to this aggregate pipeline. * * ####Examples: * * aggregate.match({ department: { $in: [ "sales", "engineering" ] } }); * * @see $match http://docs.mongodb.org/manual/reference/aggregation/match/ * @method match * @memberOf Aggregate * @param {Object} arg $match operator contents * @return {Aggregate} * @api public */ /** * Appends a new $skip operator to this aggregate pipeline. * * ####Examples: * * aggregate.skip(10); * * @see $skip http://docs.mongodb.org/manual/reference/aggregation/skip/ * @method skip * @memberOf Aggregate * @param {Number} num number of records to skip before next stage * @return {Aggregate} * @api public */ /** * Appends a new $limit operator to this aggregate pipeline. * * ####Examples: * * aggregate.limit(10); * * @see $limit http://docs.mongodb.org/manual/reference/aggregation/limit/ * @method limit * @memberOf Aggregate * @param {Number} num maximum number of records to pass to the next stage * @return {Aggregate} * @api public */ /** * Appends a new $geoNear operator to this aggregate pipeline. * * ####NOTE: * * **MUST** be used as the first operator in the pipeline. 
* * ####Examples: * * aggregate.near({ * near: [40.724, -73.997], * distanceField: "dist.calculated", // required * maxDistance: 0.008, * query: { type: "public" }, * includeLocs: "dist.location", * uniqueDocs: true, * num: 5 * }); * * @see $geoNear http://docs.mongodb.org/manual/reference/aggregation/geoNear/ * @method near * @memberOf Aggregate * @param {Object} parameters * @return {Aggregate} * @api public */ Aggregate.prototype.near = function(arg) { var op = {}; op.$geoNear = arg; return this.append(op); }; /*! * define methods */ 'group match skip limit out'.split(' ').forEach(function($operator) { Aggregate.prototype[$operator] = function(arg) { var op = {}; op['$' + $operator] = arg; return this.append(op); }; }); /** * Appends new custom $unwind operator(s) to this aggregate pipeline. * * Note that the `$unwind` operator requires the path name to start with '$'. * Mongoose will prepend '$' if the specified field doesn't start '$'. * * ####Examples: * * aggregate.unwind("tags"); * aggregate.unwind("a", "b", "c"); * * @see $unwind http://docs.mongodb.org/manual/reference/aggregation/unwind/ * @param {String} fields the field(s) to unwind * @return {Aggregate} * @api public */ Aggregate.prototype.unwind = function() { var args = utils.args(arguments); var res = []; for (var i = 0; i < args.length; ++i) { var arg = args[i]; if (arg && typeof arg === 'object') { res.push({ $unwind: arg }); } else if (typeof arg === 'string') { res.push({ $unwind: (arg && arg.charAt(0) === '$') ? arg : '$' + arg }); } else { throw new Error('Invalid arg "' + arg + '" to unwind(), ' + 'must be string or object'); } } return this.append.apply(this, res); }; /** * Appends new custom $lookup operator(s) to this aggregate pipeline. 
* * ####Examples: * * aggregate.lookup({ from: 'users', localField: 'userId', foreignField: '_id', as: 'users' }); * * @see $lookup https://docs.mongodb.org/manual/reference/operator/aggregation/lookup/#pipe._S_lookup * @param {Object} options to $lookup as described in the above link * @return {Aggregate} * @api public */ Aggregate.prototype.lookup = function(options) { return this.append({$lookup: options}); }; /** * Appends new custom $graphLookup operator(s) to this aggregate pipeline, performing a recursive search on a collection. * * Note that graphLookup can only consume at most 100MB of memory, and does not allow disk use even if `{ allowDiskUse: true }` is specified. * * #### Examples: * // Suppose we have a collection of courses, where a document might look like `{ _id: 0, name: 'Calculus', prerequisite: 'Trigonometry'}` and `{ _id: 0, name: 'Trigonometry', prerequisite: 'Algebra' }` * aggregate.graphLookup({ from: 'courses', startWith: '$prerequisite', connectFromField: 'prerequisite', connectToField: 'name', as: 'prerequisites', maxDepth: 3 }) // this will recursively search the 'courses' collection up to 3 prerequisites * * @see $graphLookup https://docs.mongodb.com/manual/reference/operator/aggregation/graphLookup/#pipe._S_graphLookup * @param {Object} options to $graphLookup as described in the above link * @return {Aggregate} * @api public */ Aggregate.prototype.graphLookup = function(options) { var cloneOptions = {}; if (options) { if (!utils.isObject(options)) { throw new TypeError('Invalid graphLookup() argument. Must be an object.'); } utils.mergeClone(cloneOptions, options); var startWith = cloneOptions.startWith; if (startWith && typeof startWith === 'string') { cloneOptions.startWith = cloneOptions.startWith.charAt(0) === '$' ? cloneOptions.startWith : '$' + cloneOptions.startWith; } } return this.append({ $graphLookup: cloneOptions }); }; /** * Appepnds new custom $sample operator(s) to this aggregate pipeline. 
* * ####Examples: * * aggregate.sample(3); // Add a pipeline that picks 3 random documents * * @see $sample https://docs.mongodb.org/manual/reference/operator/aggregation/sample/#pipe._S_sample * @param {Number} size number of random documents to pick * @return {Aggregate} * @api public */ Aggregate.prototype.sample = function(size) { return this.append({$sample: {size: size}}); }; /** * Appends a new $sort operator to this aggregate pipeline. * * If an object is passed, values allowed are `asc`, `desc`, `ascending`, `descending`, `1`, and `-1`. * * If a string is passed, it must be a space delimited list of path names. The sort order of each path is ascending unless the path name is prefixed with `-` which will be treated as descending. * * ####Examples: * * // these are equivalent * aggregate.sort({ field: 'asc', test: -1 }); * aggregate.sort('field -test'); * * @see $sort http://docs.mongodb.org/manual/reference/aggregation/sort/ * @param {Object|String} arg * @return {Aggregate} this * @api public */ Aggregate.prototype.sort = function(arg) { // TODO refactor to reuse the query builder logic var sort = {}; if (arg.constructor.name === 'Object') { var desc = ['desc', 'descending', -1]; Object.keys(arg).forEach(function(field) { // If sorting by text score, skip coercing into 1/-1 if (arg[field] instanceof Object && arg[field].$meta) { sort[field] = arg[field]; return; } sort[field] = desc.indexOf(arg[field]) === -1 ? 1 : -1; }); } else if (arguments.length === 1 && typeof arg === 'string') { arg.split(/\s+/).forEach(function(field) { if (!field) { return; } var ascend = field[0] === '-' ? -1 : 1; if (ascend === -1) { field = field.substring(1); } sort[field] = ascend; }); } else { throw new TypeError('Invalid sort() argument. Must be a string or object.'); } return this.append({$sort: sort}); }; /** * Sets the readPreference option for the aggregation query. 
* * ####Example: * * Model.aggregate(..).read('primaryPreferred').exec(callback) * * @param {String} pref one of the listed preference options or their aliases * @param {Array} [tags] optional tags for this query * @see mongodb http://docs.mongodb.org/manual/applications/replication/#read-preference * @see driver http://mongodb.github.com/node-mongodb-native/driver-articles/anintroductionto1_1and2_2.html#read-preferences */ Aggregate.prototype.read = function(pref, tags) { if (!this.options) { this.options = {}; } read.call(this, pref, tags); return this; }; /** * Execute the aggregation with explain * * ####Example: * * Model.aggregate(..).explain(callback) * * @param {Function} callback * @return {Promise} */ Aggregate.prototype.explain = function(callback) { var _this = this; var Promise = PromiseProvider.get(); return new Promise.ES6(function(resolve, reject) { if (!_this._pipeline.length) { var err = new Error('Aggregate has empty pipeline'); if (callback) { callback(err); } reject(err); return; } prepareDiscriminatorPipeline(_this); _this._model .collection .aggregate(_this._pipeline, _this.options || {}) .explain(function(error, result) { if (error) { if (callback) { callback(error); } reject(error); return; } if (callback) { callback(null, result); } resolve(result); }); }); }; /** * Sets the allowDiskUse option for the aggregation query (ignored for < 2.6.0) * * ####Example: * * Model.aggregate(..).allowDiskUse(true).exec(callback) * * @param {Boolean} value Should tell server it can use hard drive to store data during aggregation. * @param {Array} [tags] optional tags for this query * @see mongodb http://docs.mongodb.org/manual/reference/command/aggregate/ */ Aggregate.prototype.allowDiskUse = function(value) { if (!this.options) { this.options = {}; } this.options.allowDiskUse = value; return this; }; /** * Sets the cursor option option for the aggregation query (ignored for < 2.6.0). 
* Note the different syntax below: .exec() returns a cursor object, and no callback * is necessary. * * ####Example: * * var cursor = Model.aggregate(..).cursor({ batchSize: 1000 }).exec(); * cursor.each(function(error, doc) { * // use doc * }); * * @param {Object} options * @param {Number} options.batchSize set the cursor batch size * @param {Boolean} [options.useMongooseAggCursor] use experimental mongoose-specific aggregation cursor (for `eachAsync()` and other query cursor semantics) * @see mongodb http://mongodb.github.io/node-mongodb-native/2.0/api/AggregationCursor.html */ Aggregate.prototype.cursor = function(options) { if (!this.options) { this.options = {}; } this.options.cursor = options || {}; return this; }; /** * Adds a [cursor flag](http://mongodb.github.io/node-mongodb-native/2.2/api/Cursor.html#addCursorFlag) * * ####Example: * * Model.aggregate(..).addCursorFlag('noCursorTimeout', true).exec(); * * @param {String} flag * @param {Boolean} value * @see mongodb http://mongodb.github.io/node-mongodb-native/2.2/api/Cursor.html#addCursorFlag */ Aggregate.prototype.addCursorFlag = function(flag, value) { if (!this.options) { this.options = {}; } this.options[flag] = value; return this; }; /** * Adds a collation * * ####Example: * * Model.aggregate(..).collation({ locale: 'en_US', strength: 1 }).exec(); * * @param {Object} collation options * @param {Boolean} value * @see mongodb http://mongodb.github.io/node-mongodb-native/2.2/api/Collection.html#aggregate */ Aggregate.prototype.collation = function(collation) { if (!this.options) { this.options = {}; } this.options.collation = collation; return this; }; /** * Combines multiple aggregation pipelines. * * ####Example: * Model.aggregate(...) 
* .facet({ * books: [{ groupBy: '$author' }], * price: [{ $bucketAuto: { groupBy: '$price', buckets: 2 } }] * }) * .exec(); * * // Output: { books: [...], price: [{...}, {...}] } * * @param {Object} facet options * @return {Aggregate} this * @see $facet https://docs.mongodb.com/v3.4/reference/operator/aggregation/facet/ * @api public */ Aggregate.prototype.facet = function(options) { return this.append({$facet: options}); }; /** * Executes the aggregate pipeline on the currently bound Model. * * ####Example: * * aggregate.exec(callback); * * // Because a promise is returned, the `callback` is optional. * var promise = aggregate.exec(); * promise.then(..); * * @see Promise #promise_Promise * @param {Function} [callback] * @return {Promise} * @api public */ Aggregate.prototype.exec = function(callback) { if (!this._model) { throw new Error('Aggregate not bound to any Model'); } var _this = this; var Promise = PromiseProvider.get(); var options = utils.clone(this.options); if (options && options.cursor) { if (options.cursor.async) { delete options.cursor.async; return new Promise.ES6(function(resolve) { if (!_this._model.collection.buffer) { process.nextTick(function() { var cursor = _this._model.collection. aggregate(_this._pipeline, options || {}); decorateCursor(cursor); resolve(cursor); callback && callback(null, cursor); }); return; } _this._model.collection.emitter.once('queue', function() { var cursor = _this._model.collection. aggregate(_this._pipeline, options || {}); decorateCursor(cursor); resolve(cursor); callback && callback(null, cursor); }); }); } else if (options.cursor.useMongooseAggCursor) { delete options.cursor.useMongooseAggCursor; return new AggregationCursor(this); } var cursor = this._model.collection. 
aggregate(this._pipeline, this.options || {}); decorateCursor(cursor); return cursor; } return new Promise.ES6(function(resolve, reject) { if (!_this._pipeline.length) { var err = new Error('Aggregate has empty pipeline'); if (callback) { callback(err); } reject(err); return; } prepareDiscriminatorPipeline(_this); _this._model .collection .aggregate(_this._pipeline, _this.options || {}, function(error, result) { if (error) { if (callback) { callback(error); } reject(error); return; } if (callback) { callback(null, result); } resolve(result); }); }); }; /*! * Add `eachAsync()` to aggregation cursors */ function decorateCursor(cursor) { cursor.eachAsync = function(fn, opts, callback) { if (typeof opts === 'function') { callback = opts; opts = {}; } opts = opts || {}; return eachAsync(function(cb) { return cursor.next(cb); }, fn, opts, callback); }; } /** * Provides promise for aggregate. * * ####Example: * * Model.aggregate(..).then(successCallback, errorCallback); * * @see Promise #promise_Promise * @param {Function} [resolve] successCallback * @param {Function} [reject] errorCallback * @return {Promise} */ Aggregate.prototype.then = function(resolve, reject) { return this.exec().then(resolve, reject); }; /*! * Helpers */ /** * Checks whether an object is likely a pipeline operator * * @param {Object} obj object to check * @return {Boolean} * @api private */ function isOperator(obj) { var k; if (typeof obj !== 'object') { return false; } k = Object.keys(obj); return k.length === 1 && k .some(function(key) { return key[0] === '$'; }); } /*! * Adds the appropriate `$match` pipeline step to the top of an aggregate's * pipeline, should it's model is a non-root discriminator type. This is * analogous to the `prepareDiscriminatorCriteria` function in `lib/query.js`. 
* * @param {Aggregate} aggregate Aggregate to prepare */ function prepareDiscriminatorPipeline(aggregate) { var schema = aggregate._model.schema, discriminatorMapping = schema && schema.discriminatorMapping; if (discriminatorMapping && !discriminatorMapping.isRoot) { var originalPipeline = aggregate._pipeline, discriminatorKey = discriminatorMapping.key, discriminatorValue = discriminatorMapping.value; // If the first pipeline stage is a match and it doesn't specify a `__t` // key, add the discriminator key to it. This allows for potential // aggregation query optimizations not to be disturbed by this feature. if (originalPipeline[0] && originalPipeline[0].$match && !originalPipeline[0].$match[discriminatorKey]) { originalPipeline[0].$match[discriminatorKey] = discriminatorValue; // `originalPipeline` is a ref, so there's no need for // aggregate._pipeline = originalPipeline } else if (originalPipeline[0] && originalPipeline[0].$geoNear) { originalPipeline[0].$geoNear.query = originalPipeline[0].$geoNear.query || {}; originalPipeline[0].$geoNear.query[discriminatorKey] = discriminatorValue; } else { var match = {}; match[discriminatorKey] = discriminatorValue; aggregate._pipeline = [{$match: match}].concat(originalPipeline); } } } /*! * Exports */ module.exports = Aggregate;
mit
alexwiese/EasyNetQ
Source/EasyNetQ.Hosepipe/MessageReader.cs
1735
using System; using System.Collections.Generic; using System.IO; namespace EasyNetQ.Hosepipe { public class MessageReader : IMessageReader { public IEnumerable<HosepipeMessage> ReadMessages(QueueParameters parameters) { return ReadMessages(parameters, null); } public IEnumerable<HosepipeMessage> ReadMessages(QueueParameters parameters, string messageName) { if (!Directory.Exists(parameters.MessageFilePath)) { Console.WriteLine("Directory '{0}' does not exist", parameters.MessageFilePath); yield break; } var bodyPattern = (messageName ?? "*") + ".*.message.txt"; foreach (var file in Directory.GetFiles(parameters.MessageFilePath, bodyPattern)) { const string messageTag = ".message."; var directoryName = Path.GetDirectoryName(file); var fileName = Path.GetFileName(file); var propertiesFileName = Path.Combine(directoryName, fileName.Replace(messageTag, ".properties.")); var infoFileName = Path.Combine(directoryName, fileName.Replace(messageTag, ".info.")); var body = File.ReadAllText(file); var propertiesJson = File.ReadAllText(propertiesFileName); var properties = Newtonsoft.Json.JsonConvert.DeserializeObject<MessageProperties>(propertiesJson); var infoJson = File.ReadAllText(infoFileName); var info = Newtonsoft.Json.JsonConvert.DeserializeObject<MessageReceivedInfo>(infoJson); yield return new HosepipeMessage(body, properties, info); } } } }
mit
StudioBOZ/thirdClass
app/code/core/Mage/Adminhtml/Block/Customer/Edit/Tab/Wishlist/Grid/Renderer/Description.php
1457
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magentocommerce.com for more information. * * @category Mage * @package Mage_Adminhtml * @copyright Copyright (c) 2013 Magento Inc. (http://www.magentocommerce.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * Adminhtml customers wishlist grid item renderer for item visibility * * @category Mage * @package Mage_Adminhtml * @author Magento Core Team <[email protected]> */ class Mage_Adminhtml_Block_Customer_Edit_Tab_Wishlist_Grid_Renderer_Description extends Mage_Adminhtml_Block_Widget_Grid_Column_Renderer_Abstract { public function render(Varien_Object $row) { return nl2br(htmlspecialchars($row->getData($this->getColumn()->getIndex()))); } }
mit
chemissi/P2
src/public/lib/Zend/Gdata/YouTube/ActivityFeed.php
1905
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * @category Zend * @package Zend_Gdata * @subpackage YouTube * @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @version $Id: ActivityFeed.php 24593 2012-01-05 20:35:02Z matthew $ */ /** * @see Zend_Gdata_Feed */ #require_once 'Zend/Gdata/Feed.php'; /** * @see Zend_Gdata_YouTube_ActivityEntry */ #require_once 'Zend/Gdata/YouTube/ActivityEntry.php'; /** * A feed of user activity entries for YouTube * * @link http://code.google.com/apis/youtube/ * @category Zend * @package Zend_Gdata * @subpackage YouTube * @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ class Zend_Gdata_YouTube_ActivityFeed extends Zend_Gdata_Feed { /** * The classname for individual feed elements. * * @var string */ protected $_entryClassName = 'Zend_Gdata_YouTube_ActivityEntry'; /** * Creates an Activity feed, representing a list of activity entries * * @param DOMElement $element (optional) DOMElement from which this * object should be constructed. */ public function __construct($element = null) { $this->registerAllNamespaces(Zend_Gdata_YouTube::$namespaces); parent::__construct($element); } }
gpl-2.0
MonsieurTweek/PitchMyGame
lib/ezc/Template/src/syntax_trees/tst/nodes/negate_operator.php
937
<?php /** * File containing the ezcTemplateNegateOperatorTstNode class * * @package Template * @version //autogen// * @copyright Copyright (C) 2005-2010 eZ Systems AS. All rights reserved. * @license http://ez.no/licenses/new_bsd New BSD License * @access private */ /** * Fetching of property value in an expression. * * @package Template * @version //autogen// * @access private */ class ezcTemplateNegateOperatorTstNode extends ezcTemplateOperatorTstNode { /** * * @param ezcTemplateSource $source * @param ezcTemplateCursor $start * @param ezcTemplateCursor $end */ public function __construct( ezcTemplateSourceCode $source, /*ezcTemplateCursor*/ $start, /*ezcTemplateCursor*/ $end ) { parent::__construct( $source, $start, $end, 9, 2, self::NON_ASSOCIATIVE, '-' ); $this->maxParameterCount = 1; } } ?>
gpl-2.0
Devportobello/joomla-cms
layouts/joomla/toolbar/slider.php
708
<?php /** * @package Joomla.Site * @subpackage Layout * * @copyright Copyright (C) 2005 - 2018 Open Source Matters, Inc. All rights reserved. * @license GNU General Public License version 2 or later; see LICENSE.txt */ defined('JPATH_BASE') or die; JHtml::_('behavior.core'); $doTask = $displayData['doTask']; $class = $displayData['class']; $text = $displayData['text']; $name = $displayData['name']; $onClose = $displayData['onClose']; ?> <button onclick="<?php echo $doTask; ?>" class="btn btn-small" data-toggle="collapse" data-target="#collapse-<?php echo $name; ?>"<?php echo $onClose; ?>> <span class="icon-cog" aria-hidden="true"></span> <?php echo $text; ?> </button>
gpl-2.0
chemissi/P2
src/public/lib/Zend/Search/Lucene/Storage/File/Memory.php
17696
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * @category Zend * @package Zend_Search_Lucene * @subpackage Storage * @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @version $Id: Memory.php 24593 2012-01-05 20:35:02Z matthew $ */ /** Zend_Search_Lucene_Storage_File */ #require_once 'Zend/Search/Lucene/Storage/File.php'; /** * @category Zend * @package Zend_Search_Lucene * @subpackage Storage * @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ class Zend_Search_Lucene_Storage_File_Memory extends Zend_Search_Lucene_Storage_File { /** * FileData * * @var string */ private $_data; /** * File Position * * @var integer */ private $_position = 0; /** * Object constractor * * @param string $data */ public function __construct($data) { $this->_data = $data; } /** * Reads $length number of bytes at the current position in the * file and advances the file pointer. * * @param integer $length * @return string */ protected function _fread($length = 1) { $returnValue = substr($this->_data, $this->_position, $length); $this->_position += $length; return $returnValue; } /** * Sets the file position indicator and advances the file pointer. * The new position, measured in bytes from the beginning of the file, * is obtained by adding offset to the position specified by whence, * whose values are defined as follows: * SEEK_SET - Set position equal to offset bytes. 
* SEEK_CUR - Set position to current location plus offset. * SEEK_END - Set position to end-of-file plus offset. (To move to * a position before the end-of-file, you need to pass a negative value * in offset.) * Upon success, returns 0; otherwise, returns -1 * * @param integer $offset * @param integer $whence * @return integer */ public function seek($offset, $whence=SEEK_SET) { switch ($whence) { case SEEK_SET: $this->_position = $offset; break; case SEEK_CUR: $this->_position += $offset; break; case SEEK_END: $this->_position = strlen($this->_data); $this->_position += $offset; break; default: break; } } /** * Get file position. * * @return integer */ public function tell() { return $this->_position; } /** * Flush output. * * Returns true on success or false on failure. * * @return boolean */ public function flush() { // Do nothing return true; } /** * Writes $length number of bytes (all, if $length===null) to the end * of the file. * * @param string $data * @param integer $length */ protected function _fwrite($data, $length=null) { // We do not need to check if file position points to the end of "file". // Only append operation is supported now if ($length !== null) { $this->_data .= substr($data, 0, $length); } else { $this->_data .= $data; } $this->_position = strlen($this->_data); } /** * Lock file * * Lock type may be a LOCK_SH (shared lock) or a LOCK_EX (exclusive lock) * * @param integer $lockType * @return boolean */ public function lock($lockType, $nonBlockinLock = false) { // Memory files can't be shared // do nothing return true; } /** * Unlock file */ public function unlock() { // Memory files can't be shared // do nothing } /** * Reads a byte from the current position in the file * and advances the file pointer. * * @return integer */ public function readByte() { return ord($this->_data[$this->_position++]); } /** * Writes a byte to the end of the file. 
* * @param integer $byte */ public function writeByte($byte) { // We do not need to check if file position points to the end of "file". // Only append operation is supported now $this->_data .= chr($byte); $this->_position = strlen($this->_data); return 1; } /** * Read num bytes from the current position in the file * and advances the file pointer. * * @param integer $num * @return string */ public function readBytes($num) { $returnValue = substr($this->_data, $this->_position, $num); $this->_position += $num; return $returnValue; } /** * Writes num bytes of data (all, if $num===null) to the end * of the string. * * @param string $data * @param integer $num */ public function writeBytes($data, $num=null) { // We do not need to check if file position points to the end of "file". // Only append operation is supported now if ($num !== null) { $this->_data .= substr($data, 0, $num); } else { $this->_data .= $data; } $this->_position = strlen($this->_data); } /** * Reads an integer from the current position in the file * and advances the file pointer. * * @return integer */ public function readInt() { $str = substr($this->_data, $this->_position, 4); $this->_position += 4; return ord($str[0]) << 24 | ord($str[1]) << 16 | ord($str[2]) << 8 | ord($str[3]); } /** * Writes an integer to the end of file. * * @param integer $value */ public function writeInt($value) { // We do not need to check if file position points to the end of "file". // Only append operation is supported now settype($value, 'integer'); $this->_data .= chr($value>>24 & 0xFF) . chr($value>>16 & 0xFF) . chr($value>>8 & 0xFF) . chr($value & 0xFF); $this->_position = strlen($this->_data); } /** * Returns a long integer from the current position in the file * and advances the file pointer. * * @return integer * @throws Zend_Search_Lucene_Exception */ public function readLong() { /** * Check, that we work in 64-bit mode. * fseek() uses long for offset. 
Thus, largest index segment file size in 32bit mode is 2Gb */ if (PHP_INT_SIZE > 4) { $str = substr($this->_data, $this->_position, 8); $this->_position += 8; return ord($str[0]) << 56 | ord($str[1]) << 48 | ord($str[2]) << 40 | ord($str[3]) << 32 | ord($str[4]) << 24 | ord($str[5]) << 16 | ord($str[6]) << 8 | ord($str[7]); } else { return $this->readLong32Bit(); } } /** * Writes long integer to the end of file * * @param integer $value * @throws Zend_Search_Lucene_Exception */ public function writeLong($value) { // We do not need to check if file position points to the end of "file". // Only append operation is supported now /** * Check, that we work in 64-bit mode. * fseek() and ftell() use long for offset. Thus, largest index segment file size in 32bit mode is 2Gb */ if (PHP_INT_SIZE > 4) { settype($value, 'integer'); $this->_data .= chr($value>>56 & 0xFF) . chr($value>>48 & 0xFF) . chr($value>>40 & 0xFF) . chr($value>>32 & 0xFF) . chr($value>>24 & 0xFF) . chr($value>>16 & 0xFF) . chr($value>>8 & 0xFF) . chr($value & 0xFF); } else { $this->writeLong32Bit($value); } $this->_position = strlen($this->_data); } /** * Returns a long integer from the current position in the file, * advances the file pointer and return it as float (for 32-bit platforms). * * @return integer|float * @throws Zend_Search_Lucene_Exception */ public function readLong32Bit() { $wordHigh = $this->readInt(); $wordLow = $this->readInt(); if ($wordHigh & (int)0x80000000) { // It's a negative value since the highest bit is set if ($wordHigh == (int)0xFFFFFFFF && ($wordLow & (int)0x80000000)) { return $wordLow; } else { #require_once 'Zend/Search/Lucene/Exception.php'; throw new Zend_Search_Lucene_Exception('Long integers lower than -2147483648 (0x80000000) are not supported on 32-bit platforms.'); } } if ($wordLow < 0) { // Value is large than 0x7FFF FFFF. Represent low word as float. 
$wordLow &= 0x7FFFFFFF; $wordLow += (float)0x80000000; } if ($wordHigh == 0) { // Return value as integer if possible return $wordLow; } return $wordHigh*(float)0x100000000/* 0x00000001 00000000 */ + $wordLow; } /** * Writes long integer to the end of file (32-bit platforms implementation) * * @param integer|float $value * @throws Zend_Search_Lucene_Exception */ public function writeLong32Bit($value) { if ($value < (int)0x80000000) { #require_once 'Zend/Search/Lucene/Exception.php'; throw new Zend_Search_Lucene_Exception('Long integers lower than -2147483648 (0x80000000) are not supported on 32-bit platforms.'); } if ($value < 0) { $wordHigh = (int)0xFFFFFFFF; $wordLow = (int)$value; } else { $wordHigh = (int)($value/(float)0x100000000/* 0x00000001 00000000 */); $wordLow = $value - $wordHigh*(float)0x100000000/* 0x00000001 00000000 */; if ($wordLow > 0x7FFFFFFF) { // Highest bit of low word is set. Translate it to the corresponding negative integer value $wordLow -= 0x80000000; $wordLow |= 0x80000000; } } $this->writeInt($wordHigh); $this->writeInt($wordLow); } /** * Returns a variable-length integer from the current * position in the file and advances the file pointer. * * @return integer */ public function readVInt() { $nextByte = ord($this->_data[$this->_position++]); $val = $nextByte & 0x7F; for ($shift=7; ($nextByte & 0x80) != 0; $shift += 7) { $nextByte = ord($this->_data[$this->_position++]); $val |= ($nextByte & 0x7F) << $shift; } return $val; } /** * Writes a variable-length integer to the end of file. * * @param integer $value */ public function writeVInt($value) { // We do not need to check if file position points to the end of "file". // Only append operation is supported now settype($value, 'integer'); while ($value > 0x7F) { $this->_data .= chr( ($value & 0x7F)|0x80 ); $value >>= 7; } $this->_data .= chr($value); $this->_position = strlen($this->_data); } /** * Reads a string from the current position in the file * and advances the file pointer. 
* * @return string */ public function readString() { $strlen = $this->readVInt(); if ($strlen == 0) { return ''; } else { /** * This implementation supports only Basic Multilingual Plane * (BMP) characters (from 0x0000 to 0xFFFF) and doesn't support * "supplementary characters" (characters whose code points are * greater than 0xFFFF) * Java 2 represents these characters as a pair of char (16-bit) * values, the first from the high-surrogates range (0xD800-0xDBFF), * the second from the low-surrogates range (0xDC00-0xDFFF). Then * they are encoded as usual UTF-8 characters in six bytes. * Standard UTF-8 representation uses four bytes for supplementary * characters. */ $str_val = substr($this->_data, $this->_position, $strlen); $this->_position += $strlen; for ($count = 0; $count < $strlen; $count++ ) { if (( ord($str_val[$count]) & 0xC0 ) == 0xC0) { $addBytes = 1; if (ord($str_val[$count]) & 0x20 ) { $addBytes++; // Never used. Java2 doesn't encode strings in four bytes if (ord($str_val[$count]) & 0x10 ) { $addBytes++; } } $str_val .= substr($this->_data, $this->_position, $addBytes); $this->_position += $addBytes; $strlen += $addBytes; // Check for null character. Java2 encodes null character // in two bytes. if (ord($str_val[$count]) == 0xC0 && ord($str_val[$count+1]) == 0x80 ) { $str_val[$count] = 0; $str_val = substr($str_val,0,$count+1) . substr($str_val,$count+2); } $count += $addBytes; } } return $str_val; } } /** * Writes a string to the end of file. * * @param string $str * @throws Zend_Search_Lucene_Exception */ public function writeString($str) { /** * This implementation supports only Basic Multilingual Plane * (BMP) characters (from 0x0000 to 0xFFFF) and doesn't support * "supplementary characters" (characters whose code points are * greater than 0xFFFF) * Java 2 represents these characters as a pair of char (16-bit) * values, the first from the high-surrogates range (0xD800-0xDBFF), * the second from the low-surrogates range (0xDC00-0xDFFF). 
Then * they are encoded as usual UTF-8 characters in six bytes. * Standard UTF-8 representation uses four bytes for supplementary * characters. */ // We do not need to check if file position points to the end of "file". // Only append operation is supported now // convert input to a string before iterating string characters settype($str, 'string'); $chars = $strlen = strlen($str); $containNullChars = false; for ($count = 0; $count < $strlen; $count++ ) { /** * String is already in Java 2 representation. * We should only calculate actual string length and replace * \x00 by \xC0\x80 */ if ((ord($str[$count]) & 0xC0) == 0xC0) { $addBytes = 1; if (ord($str[$count]) & 0x20 ) { $addBytes++; // Never used. Java2 doesn't encode strings in four bytes // and we dont't support non-BMP characters if (ord($str[$count]) & 0x10 ) { $addBytes++; } } $chars -= $addBytes; if (ord($str[$count]) == 0 ) { $containNullChars = true; } $count += $addBytes; } } if ($chars < 0) { #require_once 'Zend/Search/Lucene/Exception.php'; throw new Zend_Search_Lucene_Exception('Invalid UTF-8 string'); } $this->writeVInt($chars); if ($containNullChars) { $this->_data .= str_replace($str, "\x00", "\xC0\x80"); } else { $this->_data .= $str; } $this->_position = strlen($this->_data); } /** * Reads binary data from the current position in the file * and advances the file pointer. * * @return string */ public function readBinary() { $length = $this->readVInt(); $returnValue = substr($this->_data, $this->_position, $length); $this->_position += $length; return $returnValue; } }
gpl-2.0
photodude/joomla-cms
administrator/components/com_content/views/article/tmpl/edit.php
5545
<?php /** * @package Joomla.Administrator * @subpackage com_content * * @copyright Copyright (C) 2005 - 2018 Open Source Matters, Inc. All rights reserved. * @license GNU General Public License version 2 or later; see LICENSE.txt */ defined('_JEXEC') or die; use Joomla\Registry\Registry; // Include the component HTML helpers. JHtml::addIncludePath(JPATH_COMPONENT . '/helpers/html'); JHtml::_('behavior.formvalidator'); JHtml::_('behavior.keepalive'); JHtml::_('formbehavior.chosen', '#jform_catid', null, array('disable_search_threshold' => 0 )); JHtml::_('formbehavior.chosen', 'select'); $this->configFieldsets = array('editorConfig'); $this->hiddenFieldsets = array('basic-limited'); $this->ignore_fieldsets = array('jmetadata', 'item_associations'); // Create shortcut to parameters. $params = clone $this->state->get('params'); $params->merge(new Registry($this->item->attribs)); $app = JFactory::getApplication(); $input = $app->input; $assoc = JLanguageAssociations::isEnabled(); JFactory::getDocument()->addScriptDeclaration(' Joomla.submitbutton = function(task) { if (task == "article.cancel" || document.formvalidator.isValid(document.getElementById("item-form"))) { jQuery("#permissions-sliders select").attr("disabled", "disabled"); ' . $this->form->getField('articletext')->save() . ' Joomla.submitform(task, document.getElementById("item-form")); // @deprecated 4.0 The following js is not needed since 3.7.0. if (task !== "article.apply") { window.parent.jQuery("#articleEdit' . (int) $this->item->id . 'Modal").modal("hide"); } } }; '); // In case of modal $isModal = $input->get('layout') == 'modal' ? true : false; $layout = $isModal ? 'modal' : 'edit'; $tmpl = $isModal || $input->get('tmpl', '', 'cmd') === 'component' ? '&tmpl=component' : ''; ?> <form action="<?php echo JRoute::_('index.php?option=com_content&layout=' . $layout . $tmpl . '&id=' . 
(int) $this->item->id); ?>" method="post" name="adminForm" id="item-form" class="form-validate"> <?php echo JLayoutHelper::render('joomla.edit.title_alias', $this); ?> <div class="form-horizontal"> <?php echo JHtml::_('bootstrap.startTabSet', 'myTab', array('active' => 'general')); ?> <?php echo JHtml::_('bootstrap.addTab', 'myTab', 'general', JText::_('COM_CONTENT_ARTICLE_CONTENT')); ?> <div class="row-fluid"> <div class="span9"> <fieldset class="adminform"> <?php echo $this->form->getInput('articletext'); ?> </fieldset> </div> <div class="span3"> <?php echo JLayoutHelper::render('joomla.edit.global', $this); ?> </div> </div> <?php echo JHtml::_('bootstrap.endTab'); ?> <?php // Do not show the images and links options if the edit form is configured not to. ?> <?php if ($params->get('show_urls_images_backend') == 1) : ?> <?php echo JHtml::_('bootstrap.addTab', 'myTab', 'images', JText::_('COM_CONTENT_FIELDSET_URLS_AND_IMAGES')); ?> <div class="row-fluid form-horizontal-desktop"> <div class="span6"> <?php echo $this->form->renderField('images'); ?> <?php foreach ($this->form->getGroup('images') as $field) : ?> <?php echo $field->renderField(); ?> <?php endforeach; ?> </div> <div class="span6"> <?php foreach ($this->form->getGroup('urls') as $field) : ?> <?php echo $field->renderField(); ?> <?php endforeach; ?> </div> </div> <?php echo JHtml::_('bootstrap.endTab'); ?> <?php endif; ?> <?php $this->show_options = $params->get('show_article_options', 1); ?> <?php echo JLayoutHelper::render('joomla.edit.params', $this); ?> <?php // Do not show the publishing options if the edit form is configured not to. 
?> <?php if ($params->get('show_publishing_options', 1) == 1) : ?> <?php echo JHtml::_('bootstrap.addTab', 'myTab', 'publishing', JText::_('COM_CONTENT_FIELDSET_PUBLISHING')); ?> <div class="row-fluid form-horizontal-desktop"> <div class="span6"> <?php echo JLayoutHelper::render('joomla.edit.publishingdata', $this); ?> </div> <div class="span6"> <?php echo JLayoutHelper::render('joomla.edit.metadata', $this); ?> </div> </div> <?php echo JHtml::_('bootstrap.endTab'); ?> <?php endif; ?> <?php if ( ! $isModal && $assoc) : ?> <?php echo JHtml::_('bootstrap.addTab', 'myTab', 'associations', JText::_('JGLOBAL_FIELDSET_ASSOCIATIONS')); ?> <?php echo $this->loadTemplate('associations'); ?> <?php echo JHtml::_('bootstrap.endTab'); ?> <?php elseif ($isModal && $assoc) : ?> <div class="hidden"><?php echo $this->loadTemplate('associations'); ?></div> <?php endif; ?> <?php if ($this->canDo->get('core.admin')) : ?> <?php echo JHtml::_('bootstrap.addTab', 'myTab', 'editor', JText::_('COM_CONTENT_SLIDER_EDITOR_CONFIG')); ?> <?php echo $this->form->renderFieldset('editorConfig'); ?> <?php echo JHtml::_('bootstrap.endTab'); ?> <?php endif; ?> <?php if ($this->canDo->get('core.admin')) : ?> <?php echo JHtml::_('bootstrap.addTab', 'myTab', 'permissions', JText::_('COM_CONTENT_FIELDSET_RULES')); ?> <?php echo $this->form->getInput('rules'); ?> <?php echo JHtml::_('bootstrap.endTab'); ?> <?php endif; ?> <?php echo JHtml::_('bootstrap.endTabSet'); ?> <input type="hidden" name="task" value="" /> <input type="hidden" name="return" value="<?php echo $input->getCmd('return'); ?>" /> <input type="hidden" name="forcedLanguage" value="<?php echo $input->get('forcedLanguage', '', 'cmd'); ?>" /> <?php echo JHtml::_('form.token'); ?> </div> </form>
gpl-2.0
FauxFaux/jdk9-jdk
src/java.base/solaris/classes/sun/nio/fs/SolarisAclFileAttributeView.java
15959
/* * Copyright (c) 2008, 2015, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.nio.fs; import java.nio.file.*; import java.nio.file.attribute.*; import java.util.*; import java.io.IOException; import jdk.internal.misc.Unsafe; import static sun.nio.fs.UnixConstants.*; import static sun.nio.fs.SolarisConstants.*; import static sun.nio.fs.SolarisNativeDispatcher.*; /** * Solaris implementation of AclFileAttributeView with native support for * NFSv4 ACLs on ZFS. 
*/ class SolarisAclFileAttributeView extends AbstractAclFileAttributeView { private static final Unsafe unsafe = Unsafe.getUnsafe(); // Maximum number of entries allowed in an ACL private static final int MAX_ACL_ENTRIES = 1024; /** * typedef struct ace { * uid_t a_who; * uint32_t a_access_mask; * uint16_t a_flags; * uint16_t a_type; * } ace_t; */ private static final short SIZEOF_ACE_T = 12; private static final short OFFSETOF_UID = 0; private static final short OFFSETOF_MASK = 4; private static final short OFFSETOF_FLAGS = 8; private static final short OFFSETOF_TYPE = 10; private final UnixPath file; private final boolean followLinks; SolarisAclFileAttributeView(UnixPath file, boolean followLinks) { this.file = file; this.followLinks = followLinks; } /** * Permission checks to access file */ private void checkAccess(UnixPath file, boolean checkRead, boolean checkWrite) { SecurityManager sm = System.getSecurityManager(); if (sm != null) { if (checkRead) file.checkRead(); if (checkWrite) file.checkWrite(); sm.checkPermission(new RuntimePermission("accessUserInformation")); } } /** * Encode the ACL to the given buffer */ private static void encode(List<AclEntry> acl, long address) { long offset = address; for (AclEntry ace: acl) { int flags = 0; // map UserPrincipal to uid and flags UserPrincipal who = ace.principal(); if (!(who instanceof UnixUserPrincipals.User)) throw new ProviderMismatchException(); UnixUserPrincipals.User user = (UnixUserPrincipals.User)who; int uid; if (user.isSpecial()) { uid = -1; if (who == UnixUserPrincipals.SPECIAL_OWNER) flags |= ACE_OWNER; else if (who == UnixUserPrincipals.SPECIAL_GROUP) flags |= (ACE_GROUP | ACE_IDENTIFIER_GROUP); else if (who == UnixUserPrincipals.SPECIAL_EVERYONE) flags |= ACE_EVERYONE; else throw new AssertionError("Unable to map special identifier"); } else { if (user instanceof UnixUserPrincipals.Group) { uid = user.gid(); flags |= ACE_IDENTIFIER_GROUP; } else { uid = user.uid(); } } // map ACE type int type; 
switch (ace.type()) { case ALLOW: type = ACE_ACCESS_ALLOWED_ACE_TYPE; break; case DENY: type = ACE_ACCESS_DENIED_ACE_TYPE; break; case AUDIT: type = ACE_SYSTEM_AUDIT_ACE_TYPE; break; case ALARM: type = ACE_SYSTEM_ALARM_ACE_TYPE; break; default: throw new AssertionError("Unable to map ACE type"); } // map permissions Set<AclEntryPermission> aceMask = ace.permissions(); int mask = 0; if (aceMask.contains(AclEntryPermission.READ_DATA)) mask |= ACE_READ_DATA; if (aceMask.contains(AclEntryPermission.WRITE_DATA)) mask |= ACE_WRITE_DATA; if (aceMask.contains(AclEntryPermission.APPEND_DATA)) mask |= ACE_APPEND_DATA; if (aceMask.contains(AclEntryPermission.READ_NAMED_ATTRS)) mask |= ACE_READ_NAMED_ATTRS; if (aceMask.contains(AclEntryPermission.WRITE_NAMED_ATTRS)) mask |= ACE_WRITE_NAMED_ATTRS; if (aceMask.contains(AclEntryPermission.EXECUTE)) mask |= ACE_EXECUTE; if (aceMask.contains(AclEntryPermission.DELETE_CHILD)) mask |= ACE_DELETE_CHILD; if (aceMask.contains(AclEntryPermission.READ_ATTRIBUTES)) mask |= ACE_READ_ATTRIBUTES; if (aceMask.contains(AclEntryPermission.WRITE_ATTRIBUTES)) mask |= ACE_WRITE_ATTRIBUTES; if (aceMask.contains(AclEntryPermission.DELETE)) mask |= ACE_DELETE; if (aceMask.contains(AclEntryPermission.READ_ACL)) mask |= ACE_READ_ACL; if (aceMask.contains(AclEntryPermission.WRITE_ACL)) mask |= ACE_WRITE_ACL; if (aceMask.contains(AclEntryPermission.WRITE_OWNER)) mask |= ACE_WRITE_OWNER; if (aceMask.contains(AclEntryPermission.SYNCHRONIZE)) mask |= ACE_SYNCHRONIZE; // FIXME - it would be desirable to know here if the file is a // directory or not. Solaris returns EINVAL if an ACE has a directory // -only flag and the file is not a directory. 
Set<AclEntryFlag> aceFlags = ace.flags(); if (aceFlags.contains(AclEntryFlag.FILE_INHERIT)) flags |= ACE_FILE_INHERIT_ACE; if (aceFlags.contains(AclEntryFlag.DIRECTORY_INHERIT)) flags |= ACE_DIRECTORY_INHERIT_ACE; if (aceFlags.contains(AclEntryFlag.NO_PROPAGATE_INHERIT)) flags |= ACE_NO_PROPAGATE_INHERIT_ACE; if (aceFlags.contains(AclEntryFlag.INHERIT_ONLY)) flags |= ACE_INHERIT_ONLY_ACE; unsafe.putInt(offset + OFFSETOF_UID, uid); unsafe.putInt(offset + OFFSETOF_MASK, mask); unsafe.putShort(offset + OFFSETOF_FLAGS, (short)flags); unsafe.putShort(offset + OFFSETOF_TYPE, (short)type); offset += SIZEOF_ACE_T; } } /** * Decode the buffer, returning an ACL */ private static List<AclEntry> decode(long address, int n) { ArrayList<AclEntry> acl = new ArrayList<>(n); for (int i=0; i<n; i++) { long offset = address + i*SIZEOF_ACE_T; int uid = unsafe.getInt(offset + OFFSETOF_UID); int mask = unsafe.getInt(offset + OFFSETOF_MASK); int flags = (int)unsafe.getShort(offset + OFFSETOF_FLAGS); int type = (int)unsafe.getShort(offset + OFFSETOF_TYPE); // map uid and flags to UserPrincipal UnixUserPrincipals.User who = null; if ((flags & ACE_OWNER) > 0) { who = UnixUserPrincipals.SPECIAL_OWNER; } else if ((flags & ACE_GROUP) > 0) { who = UnixUserPrincipals.SPECIAL_GROUP; } else if ((flags & ACE_EVERYONE) > 0) { who = UnixUserPrincipals.SPECIAL_EVERYONE; } else if ((flags & ACE_IDENTIFIER_GROUP) > 0) { who = UnixUserPrincipals.fromGid(uid); } else { who = UnixUserPrincipals.fromUid(uid); } AclEntryType aceType = null; switch (type) { case ACE_ACCESS_ALLOWED_ACE_TYPE: aceType = AclEntryType.ALLOW; break; case ACE_ACCESS_DENIED_ACE_TYPE: aceType = AclEntryType.DENY; break; case ACE_SYSTEM_AUDIT_ACE_TYPE: aceType = AclEntryType.AUDIT; break; case ACE_SYSTEM_ALARM_ACE_TYPE: aceType = AclEntryType.ALARM; break; default: assert false; } Set<AclEntryPermission> aceMask = EnumSet.noneOf(AclEntryPermission.class); if ((mask & ACE_READ_DATA) > 0) aceMask.add(AclEntryPermission.READ_DATA); if 
((mask & ACE_WRITE_DATA) > 0) aceMask.add(AclEntryPermission.WRITE_DATA); if ((mask & ACE_APPEND_DATA ) > 0) aceMask.add(AclEntryPermission.APPEND_DATA); if ((mask & ACE_READ_NAMED_ATTRS) > 0) aceMask.add(AclEntryPermission.READ_NAMED_ATTRS); if ((mask & ACE_WRITE_NAMED_ATTRS) > 0) aceMask.add(AclEntryPermission.WRITE_NAMED_ATTRS); if ((mask & ACE_EXECUTE) > 0) aceMask.add(AclEntryPermission.EXECUTE); if ((mask & ACE_DELETE_CHILD ) > 0) aceMask.add(AclEntryPermission.DELETE_CHILD); if ((mask & ACE_READ_ATTRIBUTES) > 0) aceMask.add(AclEntryPermission.READ_ATTRIBUTES); if ((mask & ACE_WRITE_ATTRIBUTES) > 0) aceMask.add(AclEntryPermission.WRITE_ATTRIBUTES); if ((mask & ACE_DELETE) > 0) aceMask.add(AclEntryPermission.DELETE); if ((mask & ACE_READ_ACL) > 0) aceMask.add(AclEntryPermission.READ_ACL); if ((mask & ACE_WRITE_ACL) > 0) aceMask.add(AclEntryPermission.WRITE_ACL); if ((mask & ACE_WRITE_OWNER) > 0) aceMask.add(AclEntryPermission.WRITE_OWNER); if ((mask & ACE_SYNCHRONIZE) > 0) aceMask.add(AclEntryPermission.SYNCHRONIZE); Set<AclEntryFlag> aceFlags = EnumSet.noneOf(AclEntryFlag.class); if ((flags & ACE_FILE_INHERIT_ACE) > 0) aceFlags.add(AclEntryFlag.FILE_INHERIT); if ((flags & ACE_DIRECTORY_INHERIT_ACE) > 0) aceFlags.add(AclEntryFlag.DIRECTORY_INHERIT); if ((flags & ACE_NO_PROPAGATE_INHERIT_ACE) > 0) aceFlags.add(AclEntryFlag.NO_PROPAGATE_INHERIT); if ((flags & ACE_INHERIT_ONLY_ACE) > 0) aceFlags.add(AclEntryFlag.INHERIT_ONLY); // build the ACL entry and add it to the list AclEntry ace = AclEntry.newBuilder() .setType(aceType) .setPrincipal(who) .setPermissions(aceMask).setFlags(aceFlags).build(); acl.add(ace); } return acl; } // Returns true if NFSv4 ACLs not enabled on file system private static boolean isAclsEnabled(int fd) { try { long enabled = fpathconf(fd, _PC_ACL_ENABLED); if (enabled == _ACL_ACE_ENABLED) return true; } catch (UnixException x) { } return false; } @Override public List<AclEntry> getAcl() throws IOException { // permission check 
checkAccess(file, true, false); // open file (will fail if file is a link and not following links) int fd = -1; try { fd = file.openForAttributeAccess(followLinks); } catch (UnixException x) { x.rethrowAsIOException(file); } try { long address = unsafe.allocateMemory(SIZEOF_ACE_T * MAX_ACL_ENTRIES); try { // read ACL and decode it int n = facl(fd, ACE_GETACL, MAX_ACL_ENTRIES, address); assert n >= 0; return decode(address, n); } catch (UnixException x) { if ((x.errno() == ENOSYS) || !isAclsEnabled(fd)) { throw new FileSystemException(file.getPathForExceptionMessage(), null, x.getMessage() + " (file system does not support NFSv4 ACLs)"); } x.rethrowAsIOException(file); return null; // keep compiler happy } finally { unsafe.freeMemory(address); } } finally { close(fd); } } @Override public void setAcl(List<AclEntry> acl) throws IOException { // permission check checkAccess(file, false, true); // open file (will fail if file is a link and not following links) int fd = -1; try { fd = file.openForAttributeAccess(followLinks); } catch (UnixException x) { x.rethrowAsIOException(file); } try { // SECURITY: need to copy list as can change during processing acl = new ArrayList<AclEntry>(acl); int n = acl.size(); long address = unsafe.allocateMemory(SIZEOF_ACE_T * n); try { encode(acl, address); facl(fd, ACE_SETACL, n, address); } catch (UnixException x) { if ((x.errno() == ENOSYS) || !isAclsEnabled(fd)) { throw new FileSystemException(file.getPathForExceptionMessage(), null, x.getMessage() + " (file system does not support NFSv4 ACLs)"); } if (x.errno() == EINVAL && (n < 3)) throw new IOException("ACL must contain at least 3 entries"); x.rethrowAsIOException(file); } finally { unsafe.freeMemory(address); } } finally { close(fd); } } @Override public UserPrincipal getOwner() throws IOException { checkAccess(file, true, false); try { UnixFileAttributes attrs = UnixFileAttributes.get(file, followLinks); return UnixUserPrincipals.fromUid(attrs.uid()); } catch (UnixException x) { 
x.rethrowAsIOException(file); return null; // keep compile happy } } @Override public void setOwner(UserPrincipal owner) throws IOException { checkAccess(file, true, false); if (!(owner instanceof UnixUserPrincipals.User)) throw new ProviderMismatchException(); if (owner instanceof UnixUserPrincipals.Group) throw new IOException("'owner' parameter is a group"); int uid = ((UnixUserPrincipals.User)owner).uid(); try { if (followLinks) { lchown(file, uid, -1); } else { chown(file, uid, -1); } } catch (UnixException x) { x.rethrowAsIOException(file); } } }
gpl-2.0
kalxas/QGIS
src/gui/editorwidgets/qgsrangeconfigdlg.cpp
7706
/*************************************************************************** qgsrangeconfigdlgbase.cpp -------------------------------------- Date : 5.1.2014 Copyright : (C) 2014 Matthias Kuhn Email : matthias at opengis dot ch *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ #include "qgsrangeconfigdlg.h" #include "qgsvectorlayer.h" QgsRangeConfigDlg::QgsRangeConfigDlg( QgsVectorLayer *vl, int fieldIdx, QWidget *parent ) : QgsEditorConfigWidget( vl, fieldIdx, parent ) { setupUi( this ); precisionSpinBox->setClearValue( 4 ); minimumSpinBox->setMinimum( std::numeric_limits<int>::lowest() ); minimumSpinBox->setMaximum( std::numeric_limits<int>::max() ); minimumSpinBox->setValue( std::numeric_limits<int>::lowest() ); maximumSpinBox->setMinimum( std::numeric_limits<int>::lowest() ); maximumSpinBox->setMaximum( std::numeric_limits<int>::max() ); maximumSpinBox->setValue( std::numeric_limits<int>::max() ); stepSpinBox->setMaximum( std::numeric_limits<int>::max() ); stepSpinBox->setValue( 1 ); stepSpinBox->setClearValue( 1 ); minimumDoubleSpinBox->setMinimum( std::numeric_limits<double>::lowest() ); minimumDoubleSpinBox->setMaximum( std::numeric_limits<double>::max() ); minimumDoubleSpinBox->setValue( std::numeric_limits<double>::min() ); maximumDoubleSpinBox->setMinimum( std::numeric_limits<double>::lowest() ); maximumDoubleSpinBox->setMaximum( std::numeric_limits<double>::max() ); maximumDoubleSpinBox->setValue( std::numeric_limits<double>::max() ); // Use integer here: stepDoubleSpinBox->setMaximum( std::numeric_limits<int>::max() ); stepDoubleSpinBox->setValue( 1 ); stepDoubleSpinBox->setClearValue( 1 ); 
QString text; const QVariant::Type fieldType( vl->fields().at( fieldIdx ).type() ); switch ( fieldType ) { case QVariant::Int: case QVariant::LongLong: case QVariant::Double: { // we use the double spin boxes for double OR long long field types, as QSpinBox does not have sufficient // available range for long long values rangeStackedWidget->setCurrentIndex( fieldType == QVariant::Int ? 0 : 1 ); if ( fieldType == QVariant::LongLong ) { minimumDoubleSpinBox->setDecimals( 0 ); maximumDoubleSpinBox->setDecimals( 0 ); stepDoubleSpinBox->setDecimals( 0 ); } rangeWidget->clear(); rangeWidget->addItem( tr( "Editable" ), QStringLiteral( "SpinBox" ) ); rangeWidget->addItem( tr( "Slider" ), QStringLiteral( "Slider" ) ); rangeWidget->addItem( tr( "Dial" ), QStringLiteral( "Dial" ) ); QVariant min; QVariant max; vl->minimumAndMaximumValue( fieldIdx, min, max ); text = tr( "Current minimum for this value is %1 and current maximum is %2." ).arg( min.toString(), max.toString() ); break; } default: text = tr( "Attribute has no integer or real type, therefore range is not usable." 
); break; } // Hide precision for integer types if ( fieldType != QVariant::Double ) { precisionSpinBox->hide(); precisionLabel->hide(); } valuesLabel->setText( text ); connect( rangeWidget, static_cast<void ( QComboBox::* )( int )>( &QComboBox::currentIndexChanged ), this, &QgsRangeConfigDlg::rangeWidgetChanged ); connect( minimumSpinBox, static_cast < void ( QSpinBox::* )( int ) > ( &QSpinBox::valueChanged ), this, &QgsEditorConfigWidget::changed ); connect( maximumSpinBox, static_cast < void ( QSpinBox::* )( int ) > ( &QSpinBox::valueChanged ), this, &QgsEditorConfigWidget::changed ); connect( stepSpinBox, static_cast < void ( QSpinBox::* )( int ) > ( &QSpinBox::valueChanged ), this, &QgsEditorConfigWidget::changed ); connect( minimumDoubleSpinBox, static_cast < void ( QDoubleSpinBox::* )( double ) > ( &QDoubleSpinBox::valueChanged ), this, &QgsEditorConfigWidget::changed ); connect( maximumDoubleSpinBox, static_cast < void ( QDoubleSpinBox::* )( double ) > ( &QDoubleSpinBox::valueChanged ), this, &QgsEditorConfigWidget::changed ); connect( stepDoubleSpinBox, static_cast < void ( QDoubleSpinBox::* )( double ) > ( &QDoubleSpinBox::valueChanged ), this, &QgsEditorConfigWidget::changed ); connect( rangeWidget, static_cast<void ( QComboBox::* )( int )>( &QComboBox::currentIndexChanged ), this, &QgsEditorConfigWidget::changed ); connect( allowNullCheckBox, &QAbstractButton::toggled, this, &QgsEditorConfigWidget::changed ); connect( suffixLineEdit, &QLineEdit::textChanged, this, &QgsEditorConfigWidget::changed ); } QVariantMap QgsRangeConfigDlg::config() { QVariantMap cfg; switch ( layer()->fields().at( field() ).type() ) { case QVariant::Int: cfg.insert( QStringLiteral( "Min" ), minimumSpinBox->value() ); cfg.insert( QStringLiteral( "Max" ), maximumSpinBox->value() ); cfg.insert( QStringLiteral( "Step" ), stepSpinBox->value() ); break; // we use the double spin boxes for double OR long long field types, as QSpinBox does not have sufficient // available range for long 
long values case QVariant::Double: case QVariant::LongLong: cfg.insert( QStringLiteral( "Min" ), minimumDoubleSpinBox->value() ); cfg.insert( QStringLiteral( "Max" ), maximumDoubleSpinBox->value() ); cfg.insert( QStringLiteral( "Step" ), stepDoubleSpinBox->value() ); break; default: break; } cfg.insert( QStringLiteral( "Style" ), rangeWidget->currentData().toString() ); cfg.insert( QStringLiteral( "AllowNull" ), allowNullCheckBox->isChecked() ); cfg.insert( QStringLiteral( "Precision" ), precisionSpinBox->value() ); if ( !suffixLineEdit->text().isEmpty() ) { cfg.insert( QStringLiteral( "Suffix" ), suffixLineEdit->text() ); } return cfg; } void QgsRangeConfigDlg::setConfig( const QVariantMap &config ) { minimumDoubleSpinBox->setValue( config.value( QStringLiteral( "Min" ), std::numeric_limits<double>::lowest() ).toDouble( ) ); maximumDoubleSpinBox->setValue( config.value( QStringLiteral( "Max" ), std::numeric_limits<double>::max() ).toDouble( ) ); stepDoubleSpinBox->setValue( config.value( QStringLiteral( "Step" ), 1.0 ).toDouble() ); minimumSpinBox->setValue( config.value( QStringLiteral( "Min" ), std::numeric_limits<int>::lowest() ).toInt() ); maximumSpinBox->setValue( config.value( QStringLiteral( "Max" ), std::numeric_limits<int>::max() ).toInt() ); stepSpinBox->setValue( config.value( QStringLiteral( "Step" ), 1 ).toInt() ); rangeWidget->setCurrentIndex( rangeWidget->findData( config.value( QStringLiteral( "Style" ), "SpinBox" ) ) ); suffixLineEdit->setText( config.value( QStringLiteral( "Suffix" ) ).toString() ); allowNullCheckBox->setChecked( config.value( QStringLiteral( "AllowNull" ), true ).toBool() ); precisionSpinBox->setValue( config.value( QStringLiteral( "Precision" ), layer()->fields().at( field() ).precision() ).toInt( ) ); } void QgsRangeConfigDlg::rangeWidgetChanged( int index ) { const QString style = rangeWidget->itemData( index ).toString(); allowNullCheckBox->setEnabled( style == QLatin1String( "SpinBox" ) ); }
gpl-2.0
DaikiMaekawa/online-marshmallow-editor
src/theme-dawn.js
4152
/* ***** BEGIN LICENSE BLOCK ***** * Distributed under the BSD license: * * Copyright (c) 2010, Ajax.org B.V. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Ajax.org B.V. nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *
 * ***** END LICENSE BLOCK ***** */

// Ace editor colour theme "Dawn": a light theme registered under the
// AMD id 'ace/theme/dawn'. All styling lives in one CSS string that is
// injected into the document when the module loads.
define('ace/theme/dawn', ['require', 'exports', 'module' , 'ace/lib/dom'], function(require, exports, module) {

// Theme metadata consumed by Ace's theme loader.
exports.isDark = false;
exports.cssClass = "ace-dawn";

// Complete stylesheet for the theme. The trailing backslashes are string
// line continuations, so the value is a single long CSS string.
exports.cssText = ".ace-dawn .ace_gutter {\
background: #ebebeb;\
color: #333\
}\
.ace-dawn .ace_print-margin {\
width: 1px;\
background: #e8e8e8\
}\
.ace-dawn {\
background-color: #F9F9F9;\
color: #080808\
}\
.ace-dawn .ace_cursor {\
color: #000000\
}\
.ace-dawn .ace_marker-layer .ace_selection {\
background: rgba(39, 95, 255, 0.30)\
}\
.ace-dawn.ace_multiselect .ace_selection.ace_start {\
box-shadow: 0 0 3px 0px #F9F9F9;\
border-radius: 2px\
}\
.ace-dawn .ace_marker-layer .ace_step {\
background: rgb(255, 255, 0)\
}\
.ace-dawn .ace_marker-layer .ace_bracket {\
margin: -1px 0 0 -1px;\
border: 1px solid rgba(75, 75, 126, 0.50)\
}\
.ace-dawn .ace_marker-layer .ace_active-line {\
background: rgba(36, 99, 180, 0.12)\
}\
.ace-dawn .ace_gutter-active-line {\
background-color : #dcdcdc\
}\
.ace-dawn .ace_marker-layer .ace_selected-word {\
border: 1px solid rgba(39, 95, 255, 0.30)\
}\
.ace-dawn .ace_invisible {\
color: rgba(75, 75, 126, 0.50)\
}\
.ace-dawn .ace_keyword,\
.ace-dawn .ace_meta {\
color: #794938\
}\
.ace-dawn .ace_constant,\
.ace-dawn .ace_constant.ace_character,\
.ace-dawn .ace_constant.ace_character.ace_escape,\
.ace-dawn .ace_constant.ace_other {\
color: #811F24\
}\
.ace-dawn .ace_invalid.ace_illegal {\
text-decoration: underline;\
font-style: italic;\
color: #F8F8F8;\
background-color: #B52A1D\
}\
.ace-dawn .ace_invalid.ace_deprecated {\
text-decoration: underline;\
font-style: italic;\
color: #B52A1D\
}\
.ace-dawn .ace_support {\
color: #691C97\
}\
.ace-dawn .ace_support.ace_constant {\
color: #B4371F\
}\
.ace-dawn .ace_fold {\
background-color: #794938;\
border-color: #080808\
}\
.ace-dawn .ace_list,\
.ace-dawn .ace_support.ace_function {\
color: #693A17\
}\
.ace-dawn .ace_storage {\
font-style: italic;\
color: #A71D5D\
}\
.ace-dawn .ace_string {\
color: #0B6125\
}\
.ace-dawn .ace_string.ace_regexp {\
color: #CF5628\
}\
.ace-dawn .ace_comment {\
font-style: italic;\
color: #5A525F\
}\
.ace-dawn .ace_variable {\
color: #234A97\
}\
.ace-dawn .ace_heading {\
color: #19356D\
}\
.ace-dawn .ace_indent-guide {\
background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAEklEQVQImWNgYGBgYLh/5+x/AAizA4hxNNsZAAAAAElFTkSuQmCC) right repeat-y;\
}";

// Inject the stylesheet once at module load time.
var dom = require("../lib/dom");
dom.importCssString(exports.cssText, exports.cssClass);
});
gpl-3.0
pmauduit/georchestra
mapfishapp/src/main/webapp/lib/proj4js/lib/defs/EPSG23032.js
80
Proj4js.defs["EPSG:23032"] = "+proj=utm +zone=32 +ellps=intl +units=m +no_defs";
gpl-3.0
sankha93/servo
components/script/dom/webgltexture.rs
14208
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use canvas_traits::CanvasMsg;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextConstants as constants;
use dom::bindings::codegen::Bindings::WebGLTextureBinding;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::reflector::reflect_dom_object;
use dom::webgl_validations::types::{TexImageTarget, TexFormat, TexDataType};
use dom::webglobject::WebGLObject;
use ipc_channel::ipc::{self, IpcSender};
use std::cell::Cell;
use std::cmp;
use webrender_traits::{WebGLCommand, WebGLError, WebGLResult, WebGLTextureId};

/// Value accepted by `tex_parameter`; WebGL exposes both an integer and a
/// float entry point for texture parameters.
pub enum TexParameterValue {
    Float(f32),
    Int(i32),
}

// Upper bounds used to size the flat per-level/per-face image-info array.
const MAX_LEVEL_COUNT: usize = 31;
const MAX_FACE_COUNT: usize = 6;

no_jsmanaged_fields!([ImageInfo; MAX_LEVEL_COUNT * MAX_FACE_COUNT]);

/// DOM wrapper around a GL texture object living in the canvas paint task.
/// All GL work is forwarded over IPC via `renderer`.
#[dom_struct]
pub struct WebGLTexture {
    webgl_object: WebGLObject,
    id: WebGLTextureId,
    /// The target to which this texture was bound the first time
    target: Cell<Option<u32>>,
    is_deleted: Cell<bool>,
    /// Stores information about mipmap levels and cubemap faces.
    #[ignore_heap_size_of = "Arrays are cumbersome"]
    image_info_array: DOMRefCell<[ImageInfo; MAX_LEVEL_COUNT * MAX_FACE_COUNT]>,
    /// Face count can only be 1 or 6
    face_count: Cell<u8>,
    base_mipmap_level: u32,
    #[ignore_heap_size_of = "Defined in ipc-channel"]
    renderer: IpcSender<CanvasMsg>,
}

impl WebGLTexture {
    /// Plain-field construction; `face_count` stays 0 until first `bind`.
    fn new_inherited(renderer: IpcSender<CanvasMsg>,
                     id: WebGLTextureId)
                     -> WebGLTexture {
        WebGLTexture {
            webgl_object: WebGLObject::new_inherited(),
            id: id,
            target: Cell::new(None),
            is_deleted: Cell::new(false),
            face_count: Cell::new(0),
            base_mipmap_level: 0,
            image_info_array: DOMRefCell::new([ImageInfo::new(); MAX_LEVEL_COUNT * MAX_FACE_COUNT]),
            renderer: renderer,
        }
    }

    /// Asks the paint task to create a GL texture; returns `None` when the
    /// renderer reports failure.
    pub fn maybe_new(global: GlobalRef, renderer: IpcSender<CanvasMsg>)
                     -> Option<Root<WebGLTexture>> {
        let (sender, receiver) = ipc::channel().unwrap();
        renderer.send(CanvasMsg::WebGL(WebGLCommand::CreateTexture(sender))).unwrap();

        let result = receiver.recv().unwrap();
        result.map(|texture_id| WebGLTexture::new(global, renderer, texture_id))
    }

    pub fn new(global: GlobalRef,
               renderer: IpcSender<CanvasMsg>,
               id: WebGLTextureId)
               -> Root<WebGLTexture> {
        reflect_dom_object(box WebGLTexture::new_inherited(renderer, id), global, WebGLTextureBinding::Wrap)
    }
}

impl WebGLTexture {
    pub fn id(&self) -> WebGLTextureId {
        self.id
    }

    // NB: Only valid texture targets come here
    /// Binds the texture. The first successful bind fixes the target (and
    /// thus the face count) for the texture's whole lifetime; rebinding to
    /// a different target is an `InvalidOperation`.
    pub fn bind(&self, target: u32) -> WebGLResult<()> {
        if self.is_deleted.get() {
            return Err(WebGLError::InvalidOperation);
        }

        if let Some(previous_target) = self.target.get() {
            if target != previous_target {
                return Err(WebGLError::InvalidOperation);
            }
        } else {
            // This is the first time binding
            let face_count = match target {
                constants::TEXTURE_2D => 1,
                constants::TEXTURE_CUBE_MAP => 6,
                _ => return Err(WebGLError::InvalidOperation)
            };
            self.face_count.set(face_count);
            self.target.set(Some(target));
        }

        let msg = CanvasMsg::WebGL(WebGLCommand::BindTexture(target, Some(self.id)));
        self.renderer.send(msg).unwrap();

        Ok(())
    }

    /// Records the image metadata for one mipmap level of one face after a
    /// texImage-style upload. Bookkeeping only — no GL command is sent here.
    pub fn initialize(&self,
                      target: TexImageTarget,
                      width: u32,
                      height: u32,
                      depth: u32,
                      internal_format: TexFormat,
                      level: u32,
                      data_type: Option<TexDataType>) -> WebGLResult<()> {
        let image_info = ImageInfo {
            width: width,
            height: height,
            depth: depth,
            internal_format: Some(internal_format),
            is_initialized: true,
            data_type: data_type,
        };

        let face_index = self.face_index_for_target(&target);
        self.set_image_infos_at_level_and_face(level, face_index, image_info);
        Ok(())
    }

    /// Implements generateMipmap validation (bound target, initialized and
    /// power-of-two base image, cube completeness, uncompressed format)
    /// before dispatching the GL command and mirroring the derived levels
    /// into our bookkeeping.
    pub fn generate_mipmap(&self) -> WebGLResult<()> {
        let target = match self.target.get() {
            Some(target) => target,
            None => {
                error!("Cannot generate mipmap on texture that has no target!");
                return Err(WebGLError::InvalidOperation);
            }
        };

        let base_image_info = self.base_image_info().unwrap();
        if !base_image_info.is_initialized() {
            return Err(WebGLError::InvalidOperation);
        }

        let is_cubic = target == constants::TEXTURE_CUBE_MAP;
        if is_cubic && !self.is_cube_complete() {
            return Err(WebGLError::InvalidOperation);
        }

        if !base_image_info.is_power_of_two() {
            return Err(WebGLError::InvalidOperation);
        }

        if base_image_info.is_compressed_format() {
            return Err(WebGLError::InvalidOperation);
        }

        // NOTE(review): the GL command is sent before the zero-level-count
        // check below, so on that error path the command has already been
        // issued — confirm this ordering is intentional.
        self.renderer.send(CanvasMsg::WebGL(WebGLCommand::GenerateMipmap(target))).unwrap();

        if self.base_mipmap_level + base_image_info.get_max_mimap_levels() == 0 {
            return Err(WebGLError::InvalidOperation);
        }

        let last_level = self.base_mipmap_level + base_image_info.get_max_mimap_levels() - 1;
        self.populate_mip_chain(self.base_mipmap_level, last_level)
    }

    /// Marks the texture deleted and tells the paint task to free it.
    /// Idempotent; errors sending the IPC message are ignored.
    pub fn delete(&self) {
        if !self.is_deleted.get() {
            self.is_deleted.set(true);
            let _ = self.renderer.send(CanvasMsg::WebGL(WebGLCommand::DeleteTexture(self.id)));
        }
    }

    pub fn is_deleted(&self) -> bool {
        self.is_deleted.get()
    }

    pub fn target(&self) -> Option<u32> {
        self.target.get()
    }

    /// We have to follow the conversion rules for GLES 2.0. See:
    ///   https://www.khronos.org/webgl/public-mailing-list/archives/1008/msg00014.html
    ///
    pub fn tex_parameter(&self,
                     target: u32,
                     name: u32,
                     value: TexParameterValue) -> WebGLResult<()> {
        // Both representations are computed; only the integer one is used
        // for the parameters currently handled.
        let (int_value, _float_value) = match value {
            TexParameterValue::Int(int_value) => (int_value, int_value as f32),
            TexParameterValue::Float(float_value) => (float_value as i32, float_value),
        };

        match name {
            constants::TEXTURE_MIN_FILTER => {
                match int_value as u32 {
                    constants::NEAREST |
                    constants::LINEAR |
                    constants::NEAREST_MIPMAP_NEAREST |
                    constants::LINEAR_MIPMAP_NEAREST |
                    constants::NEAREST_MIPMAP_LINEAR |
                    constants::LINEAR_MIPMAP_LINEAR => {
                        self.renderer
                            .send(CanvasMsg::WebGL(WebGLCommand::TexParameteri(target, name, int_value)))
                            .unwrap();
                        Ok(())
                    },

                    _ => Err(WebGLError::InvalidEnum),
                }
            },
            constants::TEXTURE_MAG_FILTER => {
                match int_value as u32 {
                    constants::NEAREST |
                    constants::LINEAR => {
                        self.renderer
                            .send(CanvasMsg::WebGL(WebGLCommand::TexParameteri(target, name, int_value)))
                            .unwrap();
                        Ok(())
                    },

                    _ => Err(WebGLError::InvalidEnum),
                }
            },
            constants::TEXTURE_WRAP_S |
            constants::TEXTURE_WRAP_T => {
                match int_value as u32 {
                    constants::CLAMP_TO_EDGE |
                    constants::MIRRORED_REPEAT |
                    constants::REPEAT => {
                        self.renderer
                            .send(CanvasMsg::WebGL(WebGLCommand::TexParameteri(target, name, int_value)))
                            .unwrap();
                        Ok(())
                    },

                    _ => Err(WebGLError::InvalidEnum),
                }
            },

            _ => Err(WebGLError::InvalidEnum),
        }
    }

    /// Derives image info for successive mip levels from `first_level`,
    /// halving dimensions (floored, min 1) at each step, and records it for
    /// every face.
    pub fn populate_mip_chain(&self, first_level: u32, last_level: u32) -> WebGLResult<()> {
        let base_image_info = self.image_info_at_face(0, first_level);
        if !base_image_info.is_initialized() {
            return Err(WebGLError::InvalidOperation);
        }

        let mut ref_width = base_image_info.width;
        let mut ref_height = base_image_info.height;

        if ref_width == 0 || ref_height == 0 {
            return Err(WebGLError::InvalidOperation);
        }

        // NOTE(review): the upper bound of this range is exclusive, so
        // `last_level` itself is never populated — confirm whether the
        // caller expects an inclusive last level here.
        for level in (first_level + 1)..last_level {
            if ref_width == 1 && ref_height == 1 {
                break;
            }

            ref_width = cmp::max(1, ref_width / 2);
            ref_height = cmp::max(1, ref_height / 2);

            let image_info = ImageInfo {
                width: ref_width,
                height: ref_height,
                depth: 0,
                internal_format: base_image_info.internal_format,
                is_initialized: base_image_info.is_initialized(),
                data_type: base_image_info.data_type,
            };

            self.set_image_infos_at_level(level, image_info);
        }

        Ok(())
    }

    /// A cube map is "cube complete" when all six faces at the base level
    /// are defined, square, equally sized and share one internal format.
    fn is_cube_complete(&self) -> bool {
        debug_assert!(self.face_count.get() == 6);

        let image_info = self.base_image_info().unwrap();
        if !image_info.is_defined() {
            return false;
        }

        let ref_width = image_info.width;
        let ref_format = image_info.internal_format;

        for face in 0..self.face_count.get() {
            let current_image_info = self.image_info_at_face(face, self.base_mipmap_level);
            if !current_image_info.is_defined() {
                return false;
            }

            // Compares height with width to enforce square dimensions
            if current_image_info.internal_format != ref_format ||
               current_image_info.width != ref_width ||
               current_image_info.height != ref_width {
                return false;
            }
        }

        true
    }

    /// Maps a texImage target to its index in the per-face storage.
    fn face_index_for_target(&self,
                             target: &TexImageTarget)
                             -> u8 {
        match *target {
            TexImageTarget::Texture2D => 0,
            TexImageTarget::CubeMapPositiveX => 0,
            TexImageTarget::CubeMapNegativeX => 1,
            TexImageTarget::CubeMapPositiveY => 2,
            TexImageTarget::CubeMapNegativeY => 3,
            TexImageTarget::CubeMapPositiveZ => 4,
            TexImageTarget::CubeMapNegativeZ => 5,
        }
    }

    pub fn image_info_for_target(&self,
                                 target: &TexImageTarget,
                                 level: u32)
                                 -> ImageInfo {
        let face_index = self.face_index_for_target(&target);
        self.image_info_at_face(face_index, level)
    }

    /// Reads one entry from the flat [level][face] array.
    fn image_info_at_face(&self, face: u8, level: u32) -> ImageInfo {
        let pos = (level * self.face_count.get() as u32) + face as u32;
        self.image_info_array.borrow()[pos as usize]
    }

    /// Writes the same info to every face of a given level.
    fn set_image_infos_at_level(&self, level: u32, image_info: ImageInfo) {
        for face in 0..self.face_count.get() {
            self.set_image_infos_at_level_and_face(level, face, image_info);
        }
    }

    fn set_image_infos_at_level_and_face(&self, level: u32, face: u8, image_info: ImageInfo) {
        debug_assert!(face < self.face_count.get());
        let pos = (level * self.face_count.get() as u32) + face as u32;
        self.image_info_array.borrow_mut()[pos as usize] = image_info;
    }

    /// Image info for face 0 at the base mipmap level.
    fn base_image_info(&self) -> Option<ImageInfo> {
        assert!((self.base_mipmap_level as usize) < MAX_LEVEL_COUNT);

        Some(self.image_info_at_face(0, self.base_mipmap_level))
    }
}

impl Drop for WebGLTexture {
    // Frees the GL-side texture when the DOM object is collected.
    fn drop(&mut self) {
        self.delete();
    }
}

/// Per-level, per-face metadata about an uploaded texture image.
#[derive(Clone, Copy, PartialEq, Debug, JSTraceable, HeapSizeOf)]
pub struct ImageInfo {
    width: u32,
    height: u32,
    depth: u32,
    internal_format: Option<TexFormat>,
    is_initialized: bool,
    data_type: Option<TexDataType>,
}

impl ImageInfo {
    /// Zeroed placeholder used before any image is uploaded.
    fn new() -> ImageInfo {
        ImageInfo {
            width: 0,
            height: 0,
            depth: 0,
            internal_format: None,
            is_initialized: false,
            data_type: None,
        }
    }

    pub fn width(&self) -> u32 {
        self.width
    }

    pub fn height(&self) -> u32 {
        self.height
    }

    pub fn internal_format(&self) -> Option<TexFormat> {
        self.internal_format
    }

    pub fn data_type(&self) -> Option<TexDataType> {
        self.data_type
    }

    // NB: 0 is not a power of two per u32::is_power_of_two, so a
    // default-constructed (zeroed) ImageInfo fails this check.
    fn is_power_of_two(&self) -> bool {
        self.width.is_power_of_two() && self.height.is_power_of_two() && self.depth.is_power_of_two()
    }

    fn is_initialized(&self) -> bool {
        self.is_initialized
    }

    fn is_defined(&self) -> bool {
        self.internal_format.is_some()
    }

    // Number of mip levels needed down to 1x1 (name keeps the historical
    // "mimap" typo; renaming is out of scope for a comment-only pass).
    fn get_max_mimap_levels(&self) -> u32 {
        let largest = cmp::max(cmp::max(self.width, self.height), self.depth);
        if largest == 0 {
            return 0;
        }
        // FloorLog2(largest) + 1
        (largest as f64).log2() as u32 + 1
    }

    fn is_compressed_format(&self) -> bool {
        // TODO: Once Servo supports compressed formats, check for them here
        false
    }
}
mpl-2.0
daniel-samson/SuiteCRM
include/SuiteEditor/SuiteEditorSettingsForDirectHTML.php
2973
<?php /** * * SugarCRM Community Edition is a customer relationship management program developed by * SugarCRM, Inc. Copyright (C) 2004-2013 SugarCRM Inc. * * SuiteCRM is an extension to SugarCRM Community Edition developed by SalesAgility Ltd. * Copyright (C) 2011 - 2018 SalesAgility Ltd. * * This program is free software; you can redistribute it and/or modify it under * the terms of the GNU Affero General Public License version 3 as published by the * Free Software Foundation with the addition of the following permission added * to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK * IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY * OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more * details. * * You should have received a copy of the GNU Affero General Public License along with * this program; if not, see http://www.gnu.org/licenses or write to the Free * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301 USA. * * You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road, * SW2-130, Cupertino, CA 95014, USA. or at email address [email protected]. * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License version 3. * * In accordance with Section 7(b) of the GNU Affero General Public License version 3, * these Appropriate Legal Notices must retain the display of the "Powered by * SugarCRM" logo and "Supercharged by SuiteCRM" logo. 
If the display of the logos is not * reasonably feasible for technical reasons, the Appropriate Legal Notices must * display the words "Powered by SugarCRM" and "Supercharged by SuiteCRM". */ if (!defined('sugarEntry') || !sugarEntry) { die('Not A Valid Entry Point'); } /** * Class SuiteEditorSettingsForDirectHTML * * store and extends an associative settings for a simple textarea editor */ class SuiteEditorSettingsForDirectHTML extends SuiteEditorSettings { /** * Editor contents * @var string */ public $contents = ''; /** * target element, original textarea ID * @var string */ public $textareaId = 'text'; /** * Editor element ID * @var string */ public $elementId = 'editor'; /** * SuiteEditorSettingsForDirectHTML constructor. * * set the default settings for a simple textarea editor * and if settings argument exists extends it * @param null $settings (optional) */ public function __construct($settings = null) { parent::__construct($settings); } }
agpl-3.0
faulteh/gnu-social
lib/filenoticestream.php
3137
<?php
/**
 * StatusNet - the distributed open-source microblogging tool
 * Copyright (C) 2011, StatusNet, Inc.
 *
 * Stream of notices that reference an URL
 *
 * PHP version 5
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *
 * @category  Stream
 * @package   StatusNet
 * @author    Evan Prodromou <[email protected]>
 * @copyright 2011 StatusNet, Inc.
 * @license   http://www.fsf.org/licensing/licenses/agpl-3.0.html AGPL 3.0
 * @link      http://status.net/
 */

if (!defined('STATUSNET')) {
    // This check helps protect against security problems;
    // your code file can't be executed directly from the web.
    exit(1);
}

/**
 * Scoped, cached stream of notices that link to a given file/URL.
 *
 * Wraps RawFileNoticeStream in a CachingNoticeStream (cache key
 * 'file:notice-ids:<file id>') and scopes results to a profile;
 * passing the default -1 resolves to the currently logged-in profile.
 */
class FileNoticeStream extends ScopingNoticeStream
{
    function __construct($file, $profile = -1)
    {
        // -1 is a sentinel meaning "use the current session's profile".
        if (is_int($profile) && $profile == -1) {
            $profile = Profile::current();
        }
        parent::__construct(new CachingNoticeStream(new RawFileNoticeStream($file),
                                                    'file:notice-ids:'.$file->id),
                            $profile);
    }
}

/**
 * Raw stream for a file
 *
 * @category  Stream
 * @package   StatusNet
 * @author    Evan Prodromou <[email protected]>
 * @copyright 2011 StatusNet, Inc.
 * @license   http://www.fsf.org/licensing/licenses/agpl-3.0.html AGPL 3.0
 * @link      http://status.net/
 */
class RawFileNoticeStream extends NoticeStream
{
    protected $file = null;

    function __construct($file)
    {
        $this->file = $file;
    }

    /**
     * Stream of notices linking to this URL
     *
     * @param integer $offset   Offset to show; default is 0
     * @param integer $limit    Limit of notices to show
     * @param integer $since_id Since this notice
     * @param integer $max_id   Before this notice
     *
     * @return array ids of notices that link to this file
     */
    function getNoticeIds($offset, $limit, $since_id, $max_id)
    {
        // Query File_to_post for post ids only, newest first.
        $f2p = new File_to_post();

        $f2p->selectAdd();
        $f2p->selectAdd('post_id');

        $f2p->file_id = $this->file->id;

        Notice::addWhereSinceId($f2p, $since_id, 'post_id', 'modified');
        Notice::addWhereMaxId($f2p, $max_id, 'post_id', 'modified');

        $f2p->orderBy('modified DESC, post_id DESC');

        if (!is_null($offset)) {
            $f2p->limit($offset, $limit);
        }

        $ids = array();

        if ($f2p->find()) {
            while ($f2p->fetch()) {
                $ids[] = $f2p->post_id;
            }
        }

        return $ids;
    }
}
agpl-3.0
nonoz/mastodon
spec/controllers/tags_controller_spec.rb
2102
require 'rails_helper' RSpec.describe TagsController, type: :controller do render_views describe 'GET #show' do let!(:tag) { Fabricate(:tag, name: 'test') } let!(:local) { Fabricate(:status, tags: [ tag ], text: 'local #test') } let!(:remote) { Fabricate(:status, tags: [ tag ], text: 'remote #test', account: Fabricate(:account, domain: 'remote')) } let!(:late) { Fabricate(:status, tags: [ tag ], text: 'late #test') } context 'when tag exists' do it 'returns http success' do get :show, params: { id: 'test', max_id: late.id } expect(response).to have_http_status(:success) end it 'renders public layout' do get :show, params: { id: 'test', max_id: late.id } expect(response).to render_template layout: 'public' end it 'renders only local statuses if local parameter is specified' do get :show, params: { id: 'test', local: true, max_id: late.id } expect(assigns(:tag)).to eq tag statuses = assigns(:statuses).to_a expect(statuses.size).to eq 1 expect(statuses[0]).to eq local end it 'renders local and remote statuses if local parameter is not specified' do get :show, params: { id: 'test', max_id: late.id } expect(assigns(:tag)).to eq tag statuses = assigns(:statuses).to_a expect(statuses.size).to eq 2 expect(statuses[0]).to eq remote expect(statuses[1]).to eq local end it 'filters statuses by the current account' do user = Fabricate(:user) user.account.block!(remote.account) sign_in(user) get :show, params: { id: 'test', max_id: late.id } expect(assigns(:tag)).to eq tag statuses = assigns(:statuses).to_a expect(statuses.size).to eq 1 expect(statuses[0]).to eq local end end context 'when tag does not exist' do it 'returns http missing for non-existent tag' do get :show, params: { id: 'none' } expect(response).to have_http_status(:missing) end end end end
agpl-3.0
harterj/moose
test/src/materials/RandomMaterial.C
897
//* This file is part of the MOOSE framework //* https://www.mooseframework.org //* //* All rights reserved, see COPYRIGHT for full restrictions //* https://github.com/idaholab/moose/blob/master/COPYRIGHT //* //* Licensed under LGPL 2.1, please see LICENSE for details //* https://www.gnu.org/licenses/lgpl-2.1.html #include "RandomMaterial.h" registerMooseObject("MooseTestApp", RandomMaterial); InputParameters RandomMaterial::validParams() { InputParameters params = Material::validParams(); return params; } RandomMaterial::RandomMaterial(const InputParameters & parameters) : Material(parameters), _rand_real(declareProperty<Real>("rand_real")), _rand_long(declareProperty<unsigned long>("rand_long")) { setRandomResetFrequency(EXEC_TIMESTEP_END); } void RandomMaterial::computeQpProperties() { _rand_real[_qp] = getRandomReal(); _rand_long[_qp] = getRandomLong(); }
lgpl-2.1
icomms/rapidsms
apps/django_extensions/management/commands/runjob.py
2260
from django.core.management.base import LabelCommand from optparse import make_option from django_extensions.management.jobs import get_job, print_jobs class Command(LabelCommand): option_list = LabelCommand.option_list + ( make_option('--list', '-l', action="store_true", dest="list_jobs", help="List all jobs with their description"), ) help = "Run a single maintenance job." args = "[app_name] job_name" label = "" requires_model_validation = True def runjob(self, app_name, job_name, options): verbosity = int(options.get('verbosity', 1)) if verbosity>1: print "Executing job: %s (app: %s)" % (job_name, app_name) try: job = get_job(app_name, job_name) except KeyError, e: if app_name: print "Error: Job %s for applabel %s not found" % (app_name, job_name) else: print "Error: Job %s not found" % job_name print "Use -l option to view all the available jobs" return try: job().execute() except Exception, e: import traceback print "ERROR OCCURED IN JOB: %s (APP: %s)" % (job_name, app_name) print "START TRACEBACK:" traceback.print_exc() print "END TRACEBACK\n" def handle(self, *args, **options): app_name = None job_name = None if len(args)==1: job_name = args[0] elif len(args)==2: app_name, job_name = args if options.get('list_jobs'): print_jobs(only_scheduled=False, show_when=True, show_appname=True) else: if not job_name: print "Run a single maintenance job. Please specify the name of the job." return self.runjob(app_name, job_name, options) # Backwards compatibility for Django r9110 if not [opt for opt in Command.option_list if opt.dest=='verbosity']: Command.option_list += ( make_option('--verbosity', '-v', action="store", dest="verbosity", default='1', type='choice', choices=['0', '1', '2'], help="Verbosity level; 0=minimal output, 1=normal output, 2=all output"), )
lgpl-3.0
chbatey/kubernetes
pkg/kubectl/cmd/logs_test.go
3997
/*
Copyright 2014 The Kubernetes Authors All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package cmd

import (
	"bytes"
	"io/ioutil"
	"net/http"
	"os"
	"strings"
	"testing"

	"github.com/spf13/cobra"

	"k8s.io/kubernetes/pkg/api"
	"k8s.io/kubernetes/pkg/api/unversioned"
	"k8s.io/kubernetes/pkg/client/restclient"
	"k8s.io/kubernetes/pkg/client/unversioned/fake"
)

// TestLog verifies that `kubectl logs <pod>` fetches the pod object and then
// streams its log endpoint, writing the body verbatim to the output buffer.
func TestLog(t *testing.T) {
	tests := []struct {
		name, version, podPath, logPath, container string
		pod                                        *api.Pod
	}{
		{
			name:    "v1 - pod log",
			version: "v1",
			podPath: "/namespaces/test/pods/foo",
			logPath: "/api/v1/namespaces/test/pods/foo/log",
			pod:     testPod(),
		},
	}
	for _, test := range tests {
		logContent := "test log content"
		f, tf, codec := NewAPIFactory()
		// Fake REST client: serve the pod object and its log endpoint;
		// any other request is unexpected and fails the test.
		tf.Client = &fake.RESTClient{
			Codec: codec,
			Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
				switch p, m := req.URL.Path, req.Method; {
				case p == test.podPath && m == "GET":
					body := objBody(codec, test.pod)
					return &http.Response{StatusCode: 200, Body: body}, nil
				case p == test.logPath && m == "GET":
					body := ioutil.NopCloser(bytes.NewBufferString(logContent))
					return &http.Response{StatusCode: 200, Body: body}, nil
				default:
					// No other request should be issued by the logs command.
					t.Errorf("%s: unexpected request: %#v\n%#v", test.name, req.URL, req)
					return nil, nil
				}
			}),
		}
		tf.Namespace = "test"
		tf.ClientConfig = &restclient.Config{ContentConfig: restclient.ContentConfig{GroupVersion: &unversioned.GroupVersion{Version: test.version}}}
		buf := bytes.NewBuffer([]byte{})

		cmd := NewCmdLogs(f, buf)
		cmd.Flags().Set("namespace", "test")
		cmd.Run(cmd, []string{"foo"})

		if buf.String() != logContent {
			t.Errorf("%s: did not get expected log content. Got: %s", test.name, buf.String())
		}
	}
}

// testPod returns a minimal single-container pod fixture used by TestLog.
func testPod() *api.Pod {
	return &api.Pod{
		ObjectMeta: api.ObjectMeta{Name: "foo", Namespace: "test", ResourceVersion: "10"},
		Spec: api.PodSpec{
			RestartPolicy: api.RestartPolicyAlways,
			DNSPolicy:     api.DNSClusterFirst,
			Containers: []api.Container{
				{
					Name: "bar",
				},
			},
		},
	}
}

// TestValidateLogFlags exercises flag validation: each invalid flag
// combination must surface the expected error message from Validate().
func TestValidateLogFlags(t *testing.T) {
	f, _, _ := NewAPIFactory()

	tests := []struct {
		name     string
		flags    map[string]string
		expected string
	}{
		{
			name:     "since & since-time",
			flags:    map[string]string{"since": "1h", "since-time": "2006-01-02T15:04:05Z"},
			expected: "at most one of `sinceTime` or `sinceSeconds` may be specified",
		},
		{
			name:     "negative limit-bytes",
			flags:    map[string]string{"limit-bytes": "-100"},
			expected: "must be greater than 0",
		},
		{
			name:     "negative tail",
			flags:    map[string]string{"tail": "-100"},
			expected: "must be greater than or equal to 0",
		},
	}
	for _, test := range tests {
		cmd := NewCmdLogs(f, bytes.NewBuffer([]byte{}))
		out := ""
		for flag, value := range test.flags {
			cmd.Flags().Set(flag, value)
		}
		// checkErr breaks tests in case of errors, plus we just
		// need to check errors returned by the command validation
		o := &LogsOptions{}
		cmd.Run = func(cmd *cobra.Command, args []string) {
			o.Complete(f, os.Stdout, cmd, args)
			out = o.Validate().Error()
			o.RunLogs()
		}
		cmd.Run(cmd, []string{"foo"})

		if !strings.Contains(out, test.expected) {
			t.Errorf("%s: expected to find:\n\t%s\nfound:\n\t%s\n", test.name, test.expected, out)
		}
	}
}
apache-2.0
kelltrick/roslyn
src/EditorFeatures/CSharpTest/DocumentationComments/XmlTagCompletionTests.cs
6201
// Copyright (c) Microsoft.  All Rights Reserved.  Licensed under the Apache License, Version 2.0.  See License.txt in the project root for license information.

using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Editor.Commands;
using Microsoft.CodeAnalysis.Editor.CSharp.DocumentationComments;
using Microsoft.CodeAnalysis.Editor.UnitTests.DocumentationComments;
using Microsoft.CodeAnalysis.Editor.UnitTests.Utilities;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.VisualStudio.Text.Operations;
using Roslyn.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.DocumentationComments
{
    /// <summary>
    /// Tests for automatic XML doc-comment tag completion: typing '>' after an
    /// open tag inserts the matching close tag, and typing '/' completes a
    /// closing tag. '$$' in the markup marks the caret position.
    /// </summary>
    public class XmlTagCompletionTests : AbstractXmlTagCompletionTests
    {
        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void SimpleTagCompletion()
        {
            var text = @"
/// <goo$$
class c { }";

            var expected = @"
/// <goo>$$</goo>
class c { }";

            Verify(text, expected, '>');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void NestedTagCompletion()
        {
            var text = @"
/// <summary>
/// <goo$$
/// </summary>
class c { }";

            var expected = @"
/// <summary>
/// <goo>$$</goo>
/// </summary>
class c { }";

            Verify(text, expected, '>');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void CompleteBeforeIncompleteTag()
        {
            var text = @"
/// <goo$$
/// </summary>
class c { }";

            var expected = @"
/// <goo>$$</goo>
/// </summary>
class c { }";

            Verify(text, expected, '>');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void NotEmptyElement()
        {
            // An empty element name gets no close tag.
            var text = @"
/// <$$
class c { }";

            var expected = @"
/// <>$$
class c { }";

            Verify(text, expected, '>');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void NotAlreadyCompleteTag()
        {
            // A close tag already present must not be duplicated.
            var text = @"
/// <goo$$</goo>
class c { }";

            var expected = @"
/// <goo>$$</goo>
class c { }";

            Verify(text, expected, '>');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void NotAlreadyCompleteTag2()
        {
            var text = @"
/// <goo$$
///
/// </goo>
class c { }";

            var expected = @"
/// <goo>$$
///
/// </goo>
class c { }";

            Verify(text, expected, '>');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void SimpleSlashCompletion()
        {
            var text = @"
/// <goo><$$
class c { }";

            var expected = @"
/// <goo></goo>$$
class c { }";

            Verify(text, expected, '/');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void NestedSlashTagCompletion()
        {
            var text = @"
/// <summary>
/// <goo><$$
/// </summary>
class c { }";

            var expected = @"
/// <summary>
/// <goo></goo>$$
/// </summary>
class c { }";

            Verify(text, expected, '/');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void SlashCompleteBeforeIncompleteTag()
        {
            var text = @"
/// <goo><$$
/// </summary>
class c { }";

            var expected = @"
/// <goo></goo>$$
/// </summary>
class c { }";

            Verify(text, expected, '/');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void SlashNotEmptyElement()
        {
            var text = @"
/// <><$$
class c { }";

            var expected = @"
/// <></$$
class c { }";

            Verify(text, expected, '/');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void SlashNotAlreadyCompleteTag()
        {
            var text = @"
/// <goo><$$goo>
class c { }";

            var expected = @"
/// <goo></$$goo>
class c { }";

            Verify(text, expected, '/');
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void SlashNotAlreadyCompleteTag2()
        {
            var text = @"
/// <goo>
///
/// <$$goo>
class c { }";

            var expected = @"
/// <goo>
///
/// </$$goo>
class c { }";

            Verify(text, expected, '/');
        }

        [WorkItem(638800, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/638800")]
        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void NestedIdenticalTags()
        {
            var text = @"
/// <goo><goo$$</goo>
class c { }";

            var expected = @"
/// <goo><goo>$$</goo></goo>
class c { }";

            Verify(text, expected, '>');
        }

        [WorkItem(638800, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/638800")]
        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void MultipleNestedIdenticalTags()
        {
            var text = @"
/// <goo><goo><goo$$</goo></goo>
class c { }";

            var expected = @"
/// <goo><goo><goo>$$</goo></goo></goo>
class c { }";

            Verify(text, expected, '>');
        }

        [WorkItem(638235, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/638235")]
        [WpfFact, Trait(Traits.Feature, Traits.Features.XmlTagCompletion)]
        public void SlashNotIfCloseTagFollows()
        {
            var text = @"
/// <summary>
/// <$$
/// </summary>
class c { }";

            var expected = @"
/// <summary>
/// </$$
/// </summary>
class c { }";

            Verify(text, expected, '/');
        }

        // Harness hooks: supply the command handler under test and a C# workspace.
        internal override ICommandHandler<TypeCharCommandArgs> CreateCommandHandler(ITextUndoHistoryRegistry undoHistory)
        {
            return new XmlTagCompletionCommandHandler(undoHistory, TestWaitIndicator.Default);
        }

        protected override TestWorkspace CreateTestWorkspace(string initialMarkup)
            => TestWorkspace.CreateCSharp(initialMarkup);
    }
}
apache-2.0
rvansa/JGroups
src/org/jgroups/annotations/MBean.java
748
package org.jgroups.annotations; import java.lang.annotation.*; /** * Optional annotation that exposes all public methods in the class * hierarchy (excluding Object) as MBean operations. All methods * are exposed if and only if exposeAll attribute is true. * <p> * * If a more fine grained MBean attribute and operation exposure is needed * do not use @MBean annotation but annotate fields and public methods directly * using @ManagedOperation and @ManagedAttribute annotations. * * * @author Chris Mills */ @Retention(RetentionPolicy.RUNTIME) @Target( { ElementType.TYPE }) @Inherited public @interface MBean { String objectName() default ""; boolean exposeAll() default false; String description() default ""; }
apache-2.0
yhakusyaku/chef-repo
cookbooks/powershell/providers/default.rb
3602
# # Author:: Seth Chisamore (<[email protected]>) # Copyright:: Copyright (c) 2011-2012 Opscode, Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # action :run do begin # force our script to terminate and return an error code on failure # http://blogs.msdn.com/b/powershell/archive/2006/04/25/583241.aspx script_file.puts("$ErrorActionPreference = 'Stop'") script_file.puts(@new_resource.code) script_file.close # default flags flags = [ # Hides the copyright banner at startup. "-NoLogo", # Does not present an interactive prompt to the user. "-NonInteractive", # Does not load the Windows PowerShell profile. "-NoProfile", # always set the ExecutionPolicy flag # see http://technet.microsoft.com/en-us/library/ee176961.aspx "-ExecutionPolicy RemoteSigned", # Powershell will hang if STDIN is redirected # http://connect.microsoft.com/PowerShell/feedback/details/572313/powershell-exe-can-hang-if-stdin-is-redirected "-InputFormat None" ] # user-provided flags unless @new_resource.flags.nil? || @new_resource.flags.empty? 
flags << @new_resource.flags.strip end cwd = ensure_windows_friendly_path(@new_resource.cwd) prefix = @new_resource.interpreter command = ensure_windows_friendly_path(script_file.path) # Chef::Resource::Execute in Chef >= 0.10.8 has first-class Win32 support if Gem::Version.create(Chef::VERSION) >= Gem::Version.create("0.10.8") execute.cwd(cwd) execute.environment(@new_resource.environment) else # we have to fake `cwd` and `environment` on older versions of Chef prefix = "cd #{@new_resource.cwd} & #{prefix}" if @new_resource.cwd command = create_env_wrapper(command, @new_resource.environment) end command = "#{prefix} #{flags.join(' ')} -Command \"#{command}\"" execute.command(command) execute.creates(@new_resource.creates) execute.user(@new_resource.user) execute.group(@new_resource.group) execute.timeout(@new_resource.timeout) execute.returns(@new_resource.returns) execute.run_action(:run) @new_resource.updated_by_last_action(true) ensure unlink_script_file end end private def execute @execute ||= Chef::Resource::Execute.new(@new_resource.name, run_context) end def script_file @script_file ||= Tempfile.open(['chef-script', '.ps1']) end def unlink_script_file @script_file && @script_file.close! end # take advantage of PowerShell scriptblocks # to pass scoped environment variables to the # command. This is mainly only useful for versions # of Chef < 0.10.8 when Chef::Resource::Execute # did not support the 'environment' attribute. def create_env_wrapper(command, environment) if environment env_string = environment.map{ |k,v| "$env:#{k}='#{v}'" }.join('; ') "& { #{env_string}; #{command} }" else command end end def ensure_windows_friendly_path(path) if path path.gsub(::File::SEPARATOR, ::File::ALT_SEPARATOR) else path end end
apache-2.0
pwoodworth/intellij-community
platform/smRunner/src/com/intellij/execution/testframework/sm/runner/SMTestLocator.java
2205
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.execution.testframework.sm.runner; import com.intellij.execution.Location; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Pair; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import java.util.Collections; import java.util.List; import java.util.Map; /** * A parser for location URLs reported by test runners. * See {@link SMTestProxy#getLocation(Project, GlobalSearchScope)} for details. */ public interface SMTestLocator { @NotNull List<Location> getLocation(@NotNull String protocol, @NotNull String path, @NotNull Project project, @NotNull GlobalSearchScope scope); class Composite implements SMTestLocator, DumbAware { private final Map<String, ? extends SMTestLocator> myLocators; public Composite(@NotNull Pair<String, ? extends SMTestLocator> first, @NotNull Pair<String, ? extends SMTestLocator>... 
rest) { myLocators = ContainerUtil.newHashMap(first, rest); } @NotNull @Override public List<Location> getLocation(@NotNull String protocol, @NotNull String path, @NotNull Project project, @NotNull GlobalSearchScope scope) { SMTestLocator locator = myLocators.get(protocol); if (locator != null && (!DumbService.isDumb(project) || DumbService.isDumbAware(locator))) { return locator.getLocation(protocol, path, project, scope); } return Collections.emptyList(); } } }
apache-2.0
nvoron23/titan
titan-core/src/main/java/com/thinkaurelius/titan/graphdb/vertices/StandardVertex.java
3423
package com.thinkaurelius.titan.graphdb.vertices; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.thinkaurelius.titan.diskstorage.Entry; import com.thinkaurelius.titan.diskstorage.EntryList; import com.thinkaurelius.titan.diskstorage.keycolumnvalue.SliceQuery; import com.thinkaurelius.titan.graphdb.internal.ElementLifeCycle; import com.thinkaurelius.titan.graphdb.internal.InternalRelation; import com.thinkaurelius.titan.graphdb.transaction.StandardTitanTx; import com.thinkaurelius.titan.graphdb.transaction.addedrelations.AddedRelationsContainer; import com.thinkaurelius.titan.graphdb.transaction.addedrelations.ConcurrentAddedRelations; import com.thinkaurelius.titan.graphdb.transaction.addedrelations.SimpleAddedRelations; import com.thinkaurelius.titan.util.datastructures.Retriever; import java.util.Collection; import java.util.Collections; import java.util.List; /** * @author Matthias Broecheler ([email protected]) */ public class StandardVertex extends AbstractVertex { private byte lifecycle; private volatile AddedRelationsContainer addedRelations=AddedRelationsContainer.EMPTY; public StandardVertex(final StandardTitanTx tx, final long id, byte lifecycle) { super(tx, id); this.lifecycle=lifecycle; } public synchronized final void updateLifeCycle(ElementLifeCycle.Event event) { this.lifecycle = ElementLifeCycle.update(lifecycle,event); } @Override public void removeRelation(InternalRelation r) { if (r.isNew()) addedRelations.remove(r); else if (r.isLoaded()) updateLifeCycle(ElementLifeCycle.Event.REMOVED_RELATION); else throw new IllegalArgumentException("Unexpected relation status: " + r.isRemoved()); } @Override public boolean addRelation(InternalRelation r) { Preconditions.checkArgument(r.isNew()); if (addedRelations==AddedRelationsContainer.EMPTY) { if (tx().getConfiguration().isSingleThreaded()) { addedRelations=new SimpleAddedRelations(); } else { synchronized (this) { if 
(addedRelations==AddedRelationsContainer.EMPTY) addedRelations=new ConcurrentAddedRelations(); } } } if (addedRelations.add(r)) { updateLifeCycle(ElementLifeCycle.Event.ADDED_RELATION); return true; } else return false; } @Override public List<InternalRelation> getAddedRelations(Predicate<InternalRelation> query) { return addedRelations.getView(query); } @Override public EntryList loadRelations(SliceQuery query, Retriever<SliceQuery, EntryList> lookup) { return (isNew()) ? EntryList.EMPTY_LIST : lookup.get(query); } @Override public boolean hasLoadedRelations(SliceQuery query) { return false; } @Override public boolean hasRemovedRelations() { return ElementLifeCycle.hasRemovedRelations(lifecycle); } @Override public boolean hasAddedRelations() { return ElementLifeCycle.hasAddedRelations(lifecycle); } @Override public synchronized void remove() { super.remove(); ((StandardVertex)it()).updateLifeCycle(ElementLifeCycle.Event.REMOVED); } @Override public byte getLifeCycle() { return lifecycle; } }
apache-2.0
bac/horizon
horizon/templatetags/shellfilter.py
1161
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import django from django import template from django.template import defaultfilters from django.utils import safestring if django.VERSION >= (1, 9): register = template.Library() else: register = template.base.Library() @register.filter(is_safe=True) @defaultfilters.stringfilter def shellfilter(value): """Replace HTML chars for shell usage.""" replacements = {'\\': '\\\\', '`': '\`', "'": "\\'", '"': '\\"'} for search, repl in replacements.items(): value = value.replace(search, repl) return safestring.mark_safe(value)
apache-2.0
ogrisel/scikit-learn
benchmarks/bench_glmnet.py
3872
""" To run this, you'll need to have installed. * glmnet-python * scikit-learn (of course) Does two benchmarks First, we fix a training set and increase the number of samples. Then we plot the computation time as function of the number of samples. In the second benchmark, we increase the number of dimensions of the training set. Then we plot the computation time as function of the number of dimensions. In both cases, only 10% of the features are informative. """ import numpy as np import gc from time import time from sklearn.datasets import make_regression alpha = 0.1 # alpha = 0.01 def rmse(a, b): return np.sqrt(np.mean((a - b) ** 2)) def bench(factory, X, Y, X_test, Y_test, ref_coef): gc.collect() # start time tstart = time() clf = factory(alpha=alpha).fit(X, Y) delta = (time() - tstart) # stop time print("duration: %0.3fs" % delta) print("rmse: %f" % rmse(Y_test, clf.predict(X_test))) print("mean coef abs diff: %f" % abs(ref_coef - clf.coef_.ravel()).mean()) return delta if __name__ == '__main__': from glmnet.elastic_net import Lasso as GlmnetLasso from sklearn.linear_model import Lasso as ScikitLasso # Delayed import of matplotlib.pyplot import matplotlib.pyplot as plt scikit_results = [] glmnet_results = [] n = 20 step = 500 n_features = 1000 n_informative = n_features / 10 n_test_samples = 1000 for i in range(1, n + 1): print('==================') print('Iteration %s of %s' % (i, n)) print('==================') X, Y, coef_ = make_regression( n_samples=(i * step) + n_test_samples, n_features=n_features, noise=0.1, n_informative=n_informative, coef=True) X_test = X[-n_test_samples:] Y_test = Y[-n_test_samples:] X = X[:(i * step)] Y = Y[:(i * step)] print("benchmarking scikit-learn: ") scikit_results.append(bench(ScikitLasso, X, Y, X_test, Y_test, coef_)) print("benchmarking glmnet: ") glmnet_results.append(bench(GlmnetLasso, X, Y, X_test, Y_test, coef_)) plt.clf() xx = range(0, n * step, step) plt.title('Lasso regression on sample dataset (%d features)' % 
n_features) plt.plot(xx, scikit_results, 'b-', label='scikit-learn') plt.plot(xx, glmnet_results, 'r-', label='glmnet') plt.legend() plt.xlabel('number of samples to classify') plt.ylabel('Time (s)') plt.show() # now do a benchmark where the number of points is fixed # and the variable is the number of features scikit_results = [] glmnet_results = [] n = 20 step = 100 n_samples = 500 for i in range(1, n + 1): print('==================') print('Iteration %02d of %02d' % (i, n)) print('==================') n_features = i * step n_informative = n_features / 10 X, Y, coef_ = make_regression( n_samples=(i * step) + n_test_samples, n_features=n_features, noise=0.1, n_informative=n_informative, coef=True) X_test = X[-n_test_samples:] Y_test = Y[-n_test_samples:] X = X[:n_samples] Y = Y[:n_samples] print("benchmarking scikit-learn: ") scikit_results.append(bench(ScikitLasso, X, Y, X_test, Y_test, coef_)) print("benchmarking glmnet: ") glmnet_results.append(bench(GlmnetLasso, X, Y, X_test, Y_test, coef_)) xx = np.arange(100, 100 + n * step, step) plt.figure('scikit-learn vs. glmnet benchmark results') plt.title('Regression in high dimensional spaces (%d samples)' % n_samples) plt.plot(xx, scikit_results, 'b-', label='scikit-learn') plt.plot(xx, glmnet_results, 'r-', label='glmnet') plt.legend() plt.xlabel('number of features') plt.ylabel('Time (s)') plt.axis('tight') plt.show()
bsd-3-clause
code4sac/ohana-api
spec/support/shared_contexts/rake.rb
584
require 'rake' shared_context 'rake' do let(:rake) { Rake::Application.new } let(:task_name) { self.class.top_level_description } let(:task_path) { "lib/tasks/#{task_name.split(':').first}" } subject { rake[task_name] } def loaded_files_excluding_current_rake_file $LOADED_FEATURES.reject { |file| file == Rails.root.join("#{task_path}.rake").to_s } end before do Rake.application = rake Rake.application.rake_require(task_path, [Rails.root.to_s], loaded_files_excluding_current_rake_file) Rake::Task.define_task(:environment) end end
bsd-3-clause
jbehave/jbehave-core
jbehave-core/src/main/java/org/jbehave/core/junit/AnnotatedEmbedderUtils.java
2883
package org.jbehave.core.junit; import org.jbehave.core.embedder.EmbedderClassLoader; import org.junit.runner.RunWith; public class AnnotatedEmbedderUtils { public static AnnotatedEmbedderRunner annotatedEmbedderRunner(String annotatedClassName, EmbedderClassLoader classLoader) { Class<?> runnerClass = runnerClass(annotatedClassName, classLoader); return newAnnotatedEmbedderRunner(runnerClass, annotatedClassName, classLoader); } private static Class<?> runnerClass(String annotatedClassName, EmbedderClassLoader classLoader) { Class<?> annotatedClass = loadClass(annotatedClassName, classLoader); RunWith annotation = annotatedClass.getAnnotation(RunWith.class); if (annotation != null) { return annotation.value(); } throw new MissingAnnotatedEmbedderRunner(annotatedClass); } private static AnnotatedEmbedderRunner newAnnotatedEmbedderRunner(Class<?> runnerClass, String annotatedClassName, EmbedderClassLoader classLoader) { try { Class<?> annotatedClass = loadClass(annotatedClassName, classLoader); return (AnnotatedEmbedderRunner) runnerClass.getConstructor(Class.class).newInstance(annotatedClass); } catch (Exception e) { throw new AnnotatedEmbedderRunnerInstantiationFailed(runnerClass, annotatedClassName, classLoader, e); } } private static Class<?> loadClass(String className, EmbedderClassLoader classLoader) { try { return classLoader.loadClass(className); } catch (ClassNotFoundException e) { throw new ClassLoadingFailed(className, classLoader, e); } } @SuppressWarnings("serial") public static class ClassLoadingFailed extends RuntimeException { public ClassLoadingFailed(String className, EmbedderClassLoader classLoader, Throwable cause) { super("Failed to load class " + className + " with classLoader " + classLoader, cause); } } @SuppressWarnings("serial") public static class AnnotatedEmbedderRunnerInstantiationFailed extends RuntimeException { public AnnotatedEmbedderRunnerInstantiationFailed(Class<?> runnerClass, String annotatedClassName, EmbedderClassLoader 
classLoader, Throwable cause) { super("Failed to instantiate annotated embedder runner " + runnerClass + " with annotatedClassName " + annotatedClassName + " and classLoader " + classLoader, cause); } } @SuppressWarnings("serial") public static class MissingAnnotatedEmbedderRunner extends RuntimeException { public MissingAnnotatedEmbedderRunner(Class<?> annotatedClass) { super("AnnotatedEmbedderRunner not specified via @RunWith annotation in annotatedClass " + annotatedClass); } } }
bsd-3-clause
jottenlips/aima-python
submissions/Miles/puzzles.py
3467
import search from math import (cos, pi) alabama_map = search.UndirectedGraph(dict( Birmingham=dict(Tuscaloosa=45, Auburn=120, Montgomery=86, Huntsville=90, Mobile=219, Dothan=197), Tuscaloosa=dict(Birmingham=45, Auburn=160, Montgomery=110, Huntsville=140, Mobile=211, Dothan=227), Auburn=dict(Birmingham=120, Tuscaloosa=160, Montgomery=57, Huntsville=212, Mobile=195, Dothan=130), Huntsville=dict(Birmingham=90, Tuscaloosa=140, Montgomery=166, Auburn=212, Mobile=302, Dothan=279), Montgomery=dict(Birmingham=86, Tuscaloosa=110, Auburn=57, Huntsville=166, Mobile=144, Dothan=120), Mobile=dict(Birmingham=219, Tuscaloosa=211, Auburn=195, Montgomery=144, Huntsville=302, Dothan=184), Dothan=dict(Birmingham=197, Tuscaloosa=227, Auburn=130, Montgomery=120, Huntsville=279, Mobile=184), Gardendale=dict(Birmingham=21), Fairhope=dict(Mobile=26, Birmingham=237) )) alabama_map.locations = dict( Birmingham=(50, 300), Tuscaloosa=(20, 270), Auburn=(50, 180), Montgomery=(45, 214), Huntsville=(50, 390), Mobile=(10, 85), Dothan=(100, 170), Gardendale=(50, 321), Fairhope=(10, 59)) alabama_puzzle = search.GraphProblem('Fairhope', 'Tuscaloosa', alabama_map) alabama_puzzle.description = ''' An abbreviated map of Middle Alabama. This map is unique, to the best of my knowledge. 
''' # A trivial Problem definition of connect four # The goal is to get either 4 x's in a row or 4 o's in a row # The x's and o's represent the colors red and yellow class ConnectFour(search.Problem): def actions(self, state): # return connect_four Red = 'X' # the player Yellow = 'O' # the computer player1 = 'Winner' state1 = ConnectFour([['O', 'O', 'O', 'O'], ['O', 'O', 'O', 'O'], ['O', 'O', 'O', 'O'], ['O', 'O', 'O', 'O'], ]) state2 = ConnectFour([['X', 'O', 'O', 'O'], ['O', 'X', 'O', 'O'], ['O', 'O', 'X', 'O'], ['O', 'O', 'O', 'X'], ]) state3 = ConnectFour([['X', 'O', 'O', 'O'], ['X', 'O', 'O', 'O'], ['X', 'O', 'O', 'O'], ['X', 'O', 'O', 'O'], ]) state4 = ConnectFour([['O', 'X', 'O', 'O'], ['O', 'X', 'O', 'O'], ['O', 'X', 'O', 'O'], ['O', 'X', 'O', 'O'], ]) state5 = ConnectFour([['O', 'O', 'X', 'O'], ['O', 'O', 'X', 'O'], ['O', 'O', 'X', 'O'], ['O', 'O', 'X', 'O'], ]) return state1 def result(self, state, action): if action == 'X': return state2 else: return state1 def goal_test(self, state): return state def h(self, node): state = node.state if self.goal_test(state): return 1 else: return -1 miles_puzzle = ConnectFour('X') miles_puzzle.label = 'Connect Four' myPuzzles = [ alabama_puzzle, miles_puzzle ]
mit
drian84/testsvn
concrete/core/helpers/number.php
2791
<?php defined('C5_EXECUTE') or die('Access Denied'); class Concrete5_Helper_Number { /** Rounds the value only out to its most significant digit. * @param string $value * @return number */ public function flexround($value) { $v = explode('.', $value); $p = 0; for ($i = 0; $i < strlen($v[1]); $i++) { if (substr($v[1], $i, 1) > 0) { $p = $i+1; } } return round($value, $p); } /** Returns the Zend_Locale instance for the current locale. * @return Zend_Locale */ protected function getZendLocale() { static $zl; $locale = Localization::activeLocale(); if((!isset($zl)) || ($locale != $zl->toString())) { $zl = new Zend_Locale($locale); } return $zl; } /** Checks if a given string is valid representation of a number in the current locale. * @return bool */ public function isNumber($string) { return Zend_Locale_Format::isNumber($string, array('locale' => $this->getZendLocale())); } /** Checks if a given string is valid representation of an integer in the current locale. * @return bool */ public function isInteger($string) { return Zend_Locale_Format::isInteger($string, array('locale' => $this->getZendLocale())); } /** Format a number with grouped thousands and localized decimal point/thousands separator. * @param number $number The number being formatted. * @param int|null $precision [default: null] The wanted precision; if null or not specified the complete localized number will be returned. * @return string */ public function format($number, $precision = null) { if(!is_numeric($number)) { return $number; } $options = array('locale' => $this->getZendLocale()); if(is_numeric($precision)) { $options['precision'] = $precision; } return Zend_Locale_Format::toNumber($number, $options); } /** Parses a localized number representation and returns the number (or null if $string is not a valid number representation). * @param string $string The number representation to parse. * @param bool $trim [default: true] Remove spaces and new lines at the start/end of $string? 
* @param int|null $precision [default: null] The wanted precision; if null or not specified the complete number will be returned. * @return null|number */ public function unformat($string, $trim = true, $precision = null) { if(is_int($string) || is_float($string)) { return is_numeric($precision) ? round($string, $precision) : $string; } if(!is_string($string)) { return null; } if($trim) { $string = trim($string); } if(!(strlen($string) && $this->isNumber($string))) { return null; } $options = array('locale' => $this->getZendLocale()); if(is_numeric($precision)) { $options['precision'] = $precision; } return Zend_Locale_Format::getNumber($string, $options); } }
mit
peteryule/monotouch-samples
CustomCollectionViewLayoutAttributes/AppDelegate.cs
941
using System; using System.Collections.Generic; using System.Linq; using Foundation; using UIKit; using CoreGraphics; namespace SimpleCollectionView { [Register ("AppDelegate")] public partial class AppDelegate : UIApplicationDelegate { UIWindow window; UICollectionViewController simpleCollectionViewController; CircleLayout circleLayout; public override bool FinishedLaunching (UIApplication app, NSDictionary options) { window = new UIWindow (UIScreen.MainScreen.Bounds); circleLayout = new CircleLayout (); simpleCollectionViewController = new SimpleCollectionViewController (circleLayout); window.RootViewController = simpleCollectionViewController; window.MakeKeyAndVisible (); return true; } static void Main (string[] args) { UIApplication.Main (args, null, "AppDelegate"); } } }
mit
dsebastien/DefinitelyTyped
types/reactstrap/lib/CardGroup.d.ts
380
import * as React from 'react'; import { CSSModule } from '../index'; export interface CardGroupProps extends React.HTMLAttributes<HTMLElement> { [key: string]: any; tag?: string | React.ReactType; className?: string; cssModule?: CSSModule; } declare class CardGroup<T = {[key: string]: any}> extends React.Component<CardGroupProps> {} export default CardGroup;
mit
markogresak/DefinitelyTyped
types/gapi.client.webrisk/index.d.ts
690
// Type definitions for non-npm package Web Risk API v1 1.0 // Project: https://cloud.google.com/web-risk/ // Definitions by: Maxim Mazurok <https://github.com/Maxim-Mazurok> // Nick Amoscato <https://github.com/namoscato> // Declan Vong <https://github.com/declanvong> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped // TypeScript Version: 2.8 // Referenced type definitions are generated by https://github.com/Maxim-Mazurok/google-api-typings-generator // In case of any problems please open issue in https://github.com/Maxim-Mazurok/google-api-typings-generator/issues/new /// <reference types="@maxim_mazurok/gapi.client.webrisk" />
mit
peteryule/monotouch-samples
MTGKTapper/MTGKTapper/MTGKTapperViewController.designer.cs
2066
// WARNING // // This file has been generated automatically by Xamarin Studio to store outlets and // actions made in the UI designer. If it is removed, they will be lost. // Manual changes to this file may not be handled correctly. // using Foundation; using System.CodeDom.Compiler; namespace MTGKTapper { [Register ("MTGKTapperViewController")] partial class MTGKTapperViewController { [Outlet] UIKit.UILabel currentScoreTextField { get; set; } [Outlet] UIKit.UILabel globalHighestScoreTextField { get; set; } [Outlet] UIKit.UIButton incrementScoreButton { get; set; } [Outlet] UIKit.UILabel playerBestScoreTextField { get; set; } [Outlet] UIKit.UIButton resetButton { get; set; } [Outlet] UIKit.UIButton selectLeaderBoardButton { get; set; } [Outlet] UIKit.UIButton showAchievementButton { get; set; } [Outlet] UIKit.UIButton showLeaderboardButton { get; set; } [Outlet] UIKit.UIButton submitScoreButton { get; set; } void ReleaseDesignerOutlets () { if (currentScoreTextField != null) { currentScoreTextField.Dispose (); currentScoreTextField = null; } if (playerBestScoreTextField != null) { playerBestScoreTextField.Dispose (); playerBestScoreTextField = null; } if (globalHighestScoreTextField != null) { globalHighestScoreTextField.Dispose (); globalHighestScoreTextField = null; } if (incrementScoreButton != null) { incrementScoreButton.Dispose (); incrementScoreButton = null; } if (submitScoreButton != null) { submitScoreButton.Dispose (); submitScoreButton = null; } if (selectLeaderBoardButton != null) { selectLeaderBoardButton.Dispose (); selectLeaderBoardButton = null; } if (showLeaderboardButton != null) { showLeaderboardButton.Dispose (); showLeaderboardButton = null; } if (showAchievementButton != null) { showAchievementButton.Dispose (); showAchievementButton = null; } if (resetButton != null) { resetButton.Dispose (); resetButton = null; } } } }
mit
justinwm/astor
examples/math_0c1e6fb/src/main/java/org/apache/commons/math3/linear/Array2DRowFieldMatrix.java
23181
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math3.linear; import java.io.Serializable; import org.apache.commons.math3.Field; import org.apache.commons.math3.FieldElement; import org.apache.commons.math3.exception.NoDataException; import org.apache.commons.math3.exception.DimensionMismatchException; import org.apache.commons.math3.exception.MathIllegalStateException; import org.apache.commons.math3.exception.NotStrictlyPositiveException; import org.apache.commons.math3.exception.NullArgumentException; import org.apache.commons.math3.exception.NumberIsTooSmallException; import org.apache.commons.math3.exception.OutOfRangeException; import org.apache.commons.math3.exception.util.LocalizedFormats; import org.apache.commons.math3.util.MathArrays; import org.apache.commons.math3.util.MathUtils; /** * Implementation of FieldMatrix<T> using a {@link FieldElement}[][] array to store entries. 
* <p> * As specified in the {@link FieldMatrix} interface, matrix element indexing * is 0-based -- e.g., <code>getEntry(0, 0)</code> * returns the element in the first row, first column of the matrix.</li></ul> * </p> * * @param <T> the type of the field elements */ public class Array2DRowFieldMatrix<T extends FieldElement<T>> extends AbstractFieldMatrix<T> implements Serializable { /** Serializable version identifier */ private static final long serialVersionUID = 7260756672015356458L; /** Entries of the matrix */ private T[][] data; /** * Creates a matrix with no data * @param field field to which the elements belong */ public Array2DRowFieldMatrix(final Field<T> field) { super(field); } /** * Create a new {@code FieldMatrix<T>} with the supplied row and column dimensions. * * @param field Field to which the elements belong. * @param rowDimension Number of rows in the new matrix. * @param columnDimension Number of columns in the new matrix. * @throws NotStrictlyPositiveException if row or column dimension is not positive. */ public Array2DRowFieldMatrix(final Field<T> field, final int rowDimension, final int columnDimension) throws NotStrictlyPositiveException { super(field, rowDimension, columnDimension); data = MathArrays.buildArray(field, rowDimension, columnDimension); } /** * Create a new {@code FieldMatrix<T>} using the input array as the underlying * data array. * <p>The input array is copied, not referenced. This constructor has * the same effect as calling {@link #Array2DRowFieldMatrix(FieldElement[][], boolean)} * with the second argument set to {@code true}.</p> * * @param d Data for the new matrix. * @throws DimensionMismatchException if {@code d} is not rectangular. * @throws NullArgumentException if {@code d} is {@code null}. * @throws NoDataException if there are not at least one row and one column. 
* @see #Array2DRowFieldMatrix(FieldElement[][], boolean) */ public Array2DRowFieldMatrix(final T[][] d) throws DimensionMismatchException, NullArgumentException, NoDataException { this(extractField(d), d); } /** * Create a new {@code FieldMatrix<T>} using the input array as the underlying * data array. * <p>The input array is copied, not referenced. This constructor has * the same effect as calling {@link #Array2DRowFieldMatrix(FieldElement[][], boolean)} * with the second argument set to {@code true}.</p> * * @param field Field to which the elements belong. * @param d Data for the new matrix. * @throws DimensionMismatchException if {@code d} is not rectangular. * @throws NullArgumentException if {@code d} is {@code null}. * @throws NoDataException if there are not at least one row and one column. * @see #Array2DRowFieldMatrix(FieldElement[][], boolean) */ public Array2DRowFieldMatrix(final Field<T> field, final T[][] d) throws DimensionMismatchException, NullArgumentException, NoDataException { super(field); copyIn(d); } /** * Create a new {@code FieldMatrix<T>} using the input array as the underlying * data array. * <p>If an array is built specially in order to be embedded in a * {@code FieldMatrix<T>} and not used directly, the {@code copyArray} may be * set to {@code false}. This will prevent the copying and improve * performance as no new array will be built and no data will be copied.</p> * * @param d Data for the new matrix. * @param copyArray Whether to copy or reference the input array. * @throws DimensionMismatchException if {@code d} is not rectangular. * @throws NoDataException if there are not at least one row and one column. * @throws NullArgumentException if {@code d} is {@code null}. 
* @see #Array2DRowFieldMatrix(FieldElement[][]) */ public Array2DRowFieldMatrix(final T[][] d, final boolean copyArray) throws DimensionMismatchException, NoDataException, NullArgumentException { this(extractField(d), d, copyArray); } /** * Create a new {@code FieldMatrix<T>} using the input array as the underlying * data array. * <p>If an array is built specially in order to be embedded in a * {@code FieldMatrix<T>} and not used directly, the {@code copyArray} may be * set to {@code false}. This will prevent the copying and improve * performance as no new array will be built and no data will be copied.</p> * * @param field Field to which the elements belong. * @param d Data for the new matrix. * @param copyArray Whether to copy or reference the input array. * @throws DimensionMismatchException if {@code d} is not rectangular. * @throws NoDataException if there are not at least one row and one column. * @throws NullArgumentException if {@code d} is {@code null}. * @see #Array2DRowFieldMatrix(FieldElement[][]) */ public Array2DRowFieldMatrix(final Field<T> field, final T[][] d, final boolean copyArray) throws DimensionMismatchException, NoDataException, NullArgumentException { super(field); if (copyArray) { copyIn(d); } else { MathUtils.checkNotNull(d); final int nRows = d.length; if (nRows == 0) { throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_ROW); } final int nCols = d[0].length; if (nCols == 0) { throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_COLUMN); } for (int r = 1; r < nRows; r++) { if (d[r].length != nCols) { throw new DimensionMismatchException(nCols, d[r].length); } } data = d; } } /** * Create a new (column) {@code FieldMatrix<T>} using {@code v} as the * data for the unique column of the created matrix. * The input array is copied. * * @param v Column vector holding data for new matrix. 
* @throws NoDataException if v is empty */ public Array2DRowFieldMatrix(final T[] v) throws NoDataException { this(extractField(v), v); } /** * Create a new (column) {@code FieldMatrix<T>} using {@code v} as the * data for the unique column of the created matrix. * The input array is copied. * * @param field Field to which the elements belong. * @param v Column vector holding data for new matrix. */ public Array2DRowFieldMatrix(final Field<T> field, final T[] v) { super(field); final int nRows = v.length; data = MathArrays.buildArray(getField(), nRows, 1); for (int row = 0; row < nRows; row++) { data[row][0] = v[row]; } } /** {@inheritDoc} */ @Override public FieldMatrix<T> createMatrix(final int rowDimension, final int columnDimension) throws NotStrictlyPositiveException { return new Array2DRowFieldMatrix<T>(getField(), rowDimension, columnDimension); } /** {@inheritDoc} */ @Override public FieldMatrix<T> copy() { return new Array2DRowFieldMatrix<T>(getField(), copyOut(), false); } /** * Add {@code m} to this matrix. * * @param m Matrix to be added. * @return {@code this} + m. * @throws MatrixDimensionMismatchException if {@code m} is not the same * size as this matrix. */ public Array2DRowFieldMatrix<T> add(final Array2DRowFieldMatrix<T> m) throws MatrixDimensionMismatchException { // safety check checkAdditionCompatible(m); final int rowCount = getRowDimension(); final int columnCount = getColumnDimension(); final T[][] outData = MathArrays.buildArray(getField(), rowCount, columnCount); for (int row = 0; row < rowCount; row++) { final T[] dataRow = data[row]; final T[] mRow = m.data[row]; final T[] outDataRow = outData[row]; for (int col = 0; col < columnCount; col++) { outDataRow[col] = dataRow[col].add(mRow[col]); } } return new Array2DRowFieldMatrix<T>(getField(), outData, false); } /** * Subtract {@code m} from this matrix. * * @param m Matrix to be subtracted. * @return {@code this} + m. 
* @throws MatrixDimensionMismatchException if {@code m} is not the same * size as this matrix. */ public Array2DRowFieldMatrix<T> subtract(final Array2DRowFieldMatrix<T> m) throws MatrixDimensionMismatchException { // safety check checkSubtractionCompatible(m); final int rowCount = getRowDimension(); final int columnCount = getColumnDimension(); final T[][] outData = MathArrays.buildArray(getField(), rowCount, columnCount); for (int row = 0; row < rowCount; row++) { final T[] dataRow = data[row]; final T[] mRow = m.data[row]; final T[] outDataRow = outData[row]; for (int col = 0; col < columnCount; col++) { outDataRow[col] = dataRow[col].subtract(mRow[col]); } } return new Array2DRowFieldMatrix<T>(getField(), outData, false); } /** * Postmultiplying this matrix by {@code m}. * * @param m Matrix to postmultiply by. * @return {@code this} * m. * @throws DimensionMismatchException if the number of columns of this * matrix is not equal to the number of rows of {@code m}. */ public Array2DRowFieldMatrix<T> multiply(final Array2DRowFieldMatrix<T> m) throws DimensionMismatchException { // safety check checkMultiplicationCompatible(m); final int nRows = this.getRowDimension(); final int nCols = m.getColumnDimension(); final int nSum = this.getColumnDimension(); final T[][] outData = MathArrays.buildArray(getField(), nRows, nCols); for (int row = 0; row < nRows; row++) { final T[] dataRow = data[row]; final T[] outDataRow = outData[row]; for (int col = 0; col < nCols; col++) { T sum = getField().getZero(); for (int i = 0; i < nSum; i++) { sum = sum.add(dataRow[i].multiply(m.data[i][col])); } outDataRow[col] = sum; } } return new Array2DRowFieldMatrix<T>(getField(), outData, false); } /** {@inheritDoc} */ @Override public T[][] getData() { return copyOut(); } /** * Get a reference to the underlying data array. * This methods returns internal data, <strong>not</strong> fresh copy of it. * * @return the 2-dimensional array of entries. 
*/ public T[][] getDataRef() { return data; } /** {@inheritDoc} */ @Override public void setSubMatrix(final T[][] subMatrix, final int row, final int column) throws OutOfRangeException, NullArgumentException, NoDataException, DimensionMismatchException { if (data == null) { if (row > 0) { throw new MathIllegalStateException(LocalizedFormats.FIRST_ROWS_NOT_INITIALIZED_YET, row); } if (column > 0) { throw new MathIllegalStateException(LocalizedFormats.FIRST_COLUMNS_NOT_INITIALIZED_YET, column); } final int nRows = subMatrix.length; if (nRows == 0) { throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_ROW); } final int nCols = subMatrix[0].length; if (nCols == 0) { throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_COLUMN); } data = MathArrays.buildArray(getField(), subMatrix.length, nCols); for (int i = 0; i < data.length; ++i) { if (subMatrix[i].length != nCols) { throw new DimensionMismatchException(nCols, subMatrix[i].length); } System.arraycopy(subMatrix[i], 0, data[i + row], column, nCols); } } else { super.setSubMatrix(subMatrix, row, column); } } /** {@inheritDoc} */ @Override public T getEntry(final int row, final int column) throws OutOfRangeException { checkRowIndex(row); checkColumnIndex(column); return data[row][column]; } /** {@inheritDoc} */ @Override public void setEntry(final int row, final int column, final T value) throws OutOfRangeException { checkRowIndex(row); checkColumnIndex(column); data[row][column] = value; } /** {@inheritDoc} */ @Override public void addToEntry(final int row, final int column, final T increment) throws OutOfRangeException { checkRowIndex(row); checkColumnIndex(column); data[row][column] = data[row][column].add(increment); } /** {@inheritDoc} */ @Override public void multiplyEntry(final int row, final int column, final T factor) throws OutOfRangeException { checkRowIndex(row); checkColumnIndex(column); data[row][column] = data[row][column].multiply(factor); } /** {@inheritDoc} */ @Override public int 
getRowDimension() { return (data == null) ? 0 : data.length; } /** {@inheritDoc} */ @Override public int getColumnDimension() { return ((data == null) || (data[0] == null)) ? 0 : data[0].length; } /** {@inheritDoc} */ @Override public T[] operate(final T[] v) throws DimensionMismatchException { final int nRows = this.getRowDimension(); final int nCols = this.getColumnDimension(); if (v.length != nCols) { throw new DimensionMismatchException(v.length, nCols); } final T[] out = MathArrays.buildArray(getField(), nRows); for (int row = 0; row < nRows; row++) { final T[] dataRow = data[row]; T sum = getField().getZero(); for (int i = 0; i < nCols; i++) { sum = sum.add(dataRow[i].multiply(v[i])); } out[row] = sum; } return out; } /** {@inheritDoc} */ @Override public T[] preMultiply(final T[] v) throws DimensionMismatchException { final int nRows = getRowDimension(); final int nCols = getColumnDimension(); if (v.length != nRows) { throw new DimensionMismatchException(v.length, nRows); } final T[] out = MathArrays.buildArray(getField(), nCols); for (int col = 0; col < nCols; ++col) { T sum = getField().getZero(); for (int i = 0; i < nRows; ++i) { sum = sum.add(data[i][col].multiply(v[i])); } out[col] = sum; } return out; } /** {@inheritDoc} */ @Override public T walkInRowOrder(final FieldMatrixChangingVisitor<T> visitor) { final int rows = getRowDimension(); final int columns = getColumnDimension(); visitor.start(rows, columns, 0, rows - 1, 0, columns - 1); for (int i = 0; i < rows; ++i) { final T[] rowI = data[i]; for (int j = 0; j < columns; ++j) { rowI[j] = visitor.visit(i, j, rowI[j]); } } return visitor.end(); } /** {@inheritDoc} */ @Override public T walkInRowOrder(final FieldMatrixPreservingVisitor<T> visitor) { final int rows = getRowDimension(); final int columns = getColumnDimension(); visitor.start(rows, columns, 0, rows - 1, 0, columns - 1); for (int i = 0; i < rows; ++i) { final T[] rowI = data[i]; for (int j = 0; j < columns; ++j) { visitor.visit(i, j, 
rowI[j]); } } return visitor.end(); } /** {@inheritDoc} */ @Override public T walkInRowOrder(final FieldMatrixChangingVisitor<T> visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { checkSubMatrixIndex(startRow, endRow, startColumn, endColumn); visitor.start(getRowDimension(), getColumnDimension(), startRow, endRow, startColumn, endColumn); for (int i = startRow; i <= endRow; ++i) { final T[] rowI = data[i]; for (int j = startColumn; j <= endColumn; ++j) { rowI[j] = visitor.visit(i, j, rowI[j]); } } return visitor.end(); } /** {@inheritDoc} */ @Override public T walkInRowOrder(final FieldMatrixPreservingVisitor<T> visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { checkSubMatrixIndex(startRow, endRow, startColumn, endColumn); visitor.start(getRowDimension(), getColumnDimension(), startRow, endRow, startColumn, endColumn); for (int i = startRow; i <= endRow; ++i) { final T[] rowI = data[i]; for (int j = startColumn; j <= endColumn; ++j) { visitor.visit(i, j, rowI[j]); } } return visitor.end(); } /** {@inheritDoc} */ @Override public T walkInColumnOrder(final FieldMatrixChangingVisitor<T> visitor) { final int rows = getRowDimension(); final int columns = getColumnDimension(); visitor.start(rows, columns, 0, rows - 1, 0, columns - 1); for (int j = 0; j < columns; ++j) { for (int i = 0; i < rows; ++i) { final T[] rowI = data[i]; rowI[j] = visitor.visit(i, j, rowI[j]); } } return visitor.end(); } /** {@inheritDoc} */ @Override public T walkInColumnOrder(final FieldMatrixPreservingVisitor<T> visitor) { final int rows = getRowDimension(); final int columns = getColumnDimension(); visitor.start(rows, columns, 0, rows - 1, 0, columns - 1); for (int j = 0; j < columns; ++j) { for (int i = 0; i < rows; ++i) { visitor.visit(i, j, data[i][j]); } } return visitor.end(); } /** {@inheritDoc} */ 
@Override public T walkInColumnOrder(final FieldMatrixChangingVisitor<T> visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { checkSubMatrixIndex(startRow, endRow, startColumn, endColumn); visitor.start(getRowDimension(), getColumnDimension(), startRow, endRow, startColumn, endColumn); for (int j = startColumn; j <= endColumn; ++j) { for (int i = startRow; i <= endRow; ++i) { final T[] rowI = data[i]; rowI[j] = visitor.visit(i, j, rowI[j]); } } return visitor.end(); } /** {@inheritDoc} */ @Override public T walkInColumnOrder(final FieldMatrixPreservingVisitor<T> visitor, final int startRow, final int endRow, final int startColumn, final int endColumn) throws OutOfRangeException, NumberIsTooSmallException { checkSubMatrixIndex(startRow, endRow, startColumn, endColumn); visitor.start(getRowDimension(), getColumnDimension(), startRow, endRow, startColumn, endColumn); for (int j = startColumn; j <= endColumn; ++j) { for (int i = startRow; i <= endRow; ++i) { visitor.visit(i, j, data[i][j]); } } return visitor.end(); } /** * Get a fresh copy of the underlying data array. * * @return a copy of the underlying data array. */ private T[][] copyOut() { final int nRows = this.getRowDimension(); final T[][] out = MathArrays.buildArray(getField(), nRows, getColumnDimension()); // can't copy 2-d array in one shot, otherwise get row references for (int i = 0; i < nRows; i++) { System.arraycopy(data[i], 0, out[i], 0, data[i].length); } return out; } /** * Replace data with a fresh copy of the input array. * * @param in Data to copy. * @throws NoDataException if the input array is empty. * @throws DimensionMismatchException if the input array is not rectangular. * @throws NullArgumentException if the input array is {@code null}. */ private void copyIn(final T[][] in) throws NullArgumentException, NoDataException, DimensionMismatchException { setSubMatrix(in, 0, 0); } }
gpl-2.0
yuanmouren1hao/CDML-SEU
lib/exe/ajax.php
11247
<?php /** * DokuWiki AJAX call handler * * @license GPL 2 (http://www.gnu.org/licenses/gpl.html) * @author Andreas Gohr <[email protected]> */ if(!defined('DOKU_INC')) define('DOKU_INC',dirname(__FILE__).'/../../'); require_once(DOKU_INC.'inc/init.php'); //close session session_write_close(); header('Content-Type: text/html; charset=utf-8'); //call the requested function if($INPUT->post->has('call')){ $call = $INPUT->post->str('call'); }else if($INPUT->get->has('call')){ $call = $INPUT->get->str('call'); }else{ exit; } $callfn = 'ajax_'.$call; if(function_exists($callfn)){ $callfn(); }else{ $evt = new Doku_Event('AJAX_CALL_UNKNOWN', $call); if ($evt->advise_before()) { print "AJAX call '".htmlspecialchars($call)."' unknown!\n"; exit; } $evt->advise_after(); unset($evt); } /** * Searches for matching pagenames * * @author Andreas Gohr <[email protected]> */ function ajax_qsearch(){ global $lang; global $INPUT; $maxnumbersuggestions = 50; $query = $INPUT->post->str('q'); if(empty($query)) $query = $INPUT->get->str('q'); if(empty($query)) return; $query = urldecode($query); $data = ft_pageLookup($query, true, useHeading('navigation')); if(!count($data)) return; print '<strong>'.$lang['quickhits'].'</strong>'; print '<ul>'; $counter = 0; foreach($data as $id => $title){ if (useHeading('navigation')) { $name = $title; } else { $ns = getNS($id); if($ns){ $name = noNS($id).' ('.$ns.')'; }else{ $name = $id; } } echo '<li>' . html_wikilink(':'.$id,$name) . 
'</li>'; $counter ++; if($counter > $maxnumbersuggestions) { echo '<li>...</li>'; break; } } print '</ul>'; } /** * Support OpenSearch suggestions * * @link http://www.opensearch.org/Specifications/OpenSearch/Extensions/Suggestions/1.0 * @author Mike Frysinger <[email protected]> */ function ajax_suggestions() { global $INPUT; $query = cleanID($INPUT->post->str('q')); if(empty($query)) $query = cleanID($INPUT->get->str('q')); if(empty($query)) return; $data = ft_pageLookup($query); if(!count($data)) return; $data = array_keys($data); // limit results to 15 hits $data = array_slice($data, 0, 15); $data = array_map('trim',$data); $data = array_map('noNS',$data); $data = array_unique($data); sort($data); /* now construct a json */ $suggestions = array( $query, // the original query $data, // some suggestions array(), // no description array() // no urls ); $json = new JSON(); header('Content-Type: application/x-suggestions+json'); print $json->encode($suggestions); } /** * Refresh a page lock and save draft * * Andreas Gohr <[email protected]> */ function ajax_lock(){ global $conf; global $lang; global $ID; global $INFO; global $INPUT; $ID = cleanID($INPUT->post->str('id')); if(empty($ID)) return; $INFO = pageinfo(); if (!$INFO['writable']) { echo 'Permission denied'; return; } if(!checklock($ID)){ lock($ID); echo 1; } if($conf['usedraft'] && $INPUT->post->str('wikitext')){ $client = $_SERVER['REMOTE_USER']; if(!$client) $client = clientIP(true); $draft = array('id' => $ID, 'prefix' => substr($INPUT->post->str('prefix'), 0, -1), 'text' => $INPUT->post->str('wikitext'), 'suffix' => $INPUT->post->str('suffix'), 'date' => $INPUT->post->int('date'), 'client' => $client, ); $cname = getCacheName($draft['client'].$ID,'.draft'); if(io_saveFile($cname,serialize($draft))){ echo $lang['draftdate'].' 
'.dformat(); } } } /** * Delete a draft * * @author Andreas Gohr <[email protected]> */ function ajax_draftdel(){ global $INPUT; $id = cleanID($INPUT->str('id')); if(empty($id)) return; $client = $_SERVER['REMOTE_USER']; if(!$client) $client = clientIP(true); $cname = getCacheName($client.$id,'.draft'); @unlink($cname); } /** * Return subnamespaces for the Mediamanager * * @author Andreas Gohr <[email protected]> */ function ajax_medians(){ global $conf; global $INPUT; // wanted namespace $ns = cleanID($INPUT->post->str('ns')); $dir = utf8_encodeFN(str_replace(':','/',$ns)); $lvl = count(explode(':',$ns)); $data = array(); search($data,$conf['mediadir'],'search_index',array('nofiles' => true),$dir); foreach(array_keys($data) as $item){ $data[$item]['level'] = $lvl+1; } echo html_buildlist($data, 'idx', 'media_nstree_item', 'media_nstree_li'); } /** * Return list of files for the Mediamanager * * @author Andreas Gohr <[email protected]> */ function ajax_medialist(){ global $NS; global $INPUT; $NS = cleanID($INPUT->post->str('ns')); $sort = $INPUT->post->bool('recent') ? 
'date' : 'natural'; if ($INPUT->post->str('do') == 'media') { tpl_mediaFileList(); } else { tpl_mediaContent(true, $sort); } } /** * Return the content of the right column * (image details) for the Mediamanager * * @author Kate Arzamastseva <[email protected]> */ function ajax_mediadetails(){ global $IMG, $JUMPTO, $REV, $fullscreen, $INPUT; $fullscreen = true; require_once(DOKU_INC.'lib/exe/mediamanager.php'); $image = ''; if ($INPUT->has('image')) $image = cleanID($INPUT->str('image')); if (isset($IMG)) $image = $IMG; if (isset($JUMPTO)) $image = $JUMPTO; $rev = false; if (isset($REV) && !$JUMPTO) $rev = $REV; html_msgarea(); tpl_mediaFileDetails($image, $rev); } /** * Returns image diff representation for mediamanager * @author Kate Arzamastseva <[email protected]> */ function ajax_mediadiff(){ global $NS; global $INPUT; $image = ''; if ($INPUT->has('image')) $image = cleanID($INPUT->str('image')); $NS = getNS($image); $auth = auth_quickaclcheck("$NS:*"); media_diff($image, $NS, $auth, true); } function ajax_mediaupload(){ global $NS, $MSG, $INPUT; $id = ''; if ($_FILES['qqfile']['tmp_name']) { $id = $INPUT->post->str('mediaid', $_FILES['qqfile']['name']); } elseif ($INPUT->get->has('qqfile')) { $id = $INPUT->get->str('qqfile'); } $id = cleanID($id); $NS = $INPUT->str('ns'); $ns = $NS.':'.getNS($id); $AUTH = auth_quickaclcheck("$ns:*"); if($AUTH >= AUTH_UPLOAD) { io_createNamespace("$ns:xxx", 'media'); } if ($_FILES['qqfile']['error']) unset($_FILES['qqfile']); $res = false; if ($_FILES['qqfile']['tmp_name']) $res = media_upload($NS, $AUTH, $_FILES['qqfile']); if ($INPUT->get->has('qqfile')) $res = media_upload_xhr($NS, $AUTH); if($res) { $result = array( 'success' => true, 'link' => media_managerURL(array('ns' => $ns, 'image' => $NS . ':' . $id), '&'), 'id' => $NS . ':' . 
$id, 'ns' => $NS ); } else { $error = ''; if(isset($MSG)) { foreach($MSG as $msg) { $error .= $msg['msg']; } } $result = array( 'error' => $error, 'ns' => $NS ); } $json = new JSON; header('Content-Type: application/json'); echo $json->encode($result); } /** * Return sub index for index view * * @author Andreas Gohr <[email protected]> */ function ajax_index(){ global $conf; global $INPUT; // wanted namespace $ns = cleanID($INPUT->post->str('idx')); $dir = utf8_encodeFN(str_replace(':','/',$ns)); $lvl = count(explode(':',$ns)); $data = array(); search($data,$conf['datadir'],'search_index',array('ns' => $ns),$dir); foreach(array_keys($data) as $item){ $data[$item]['level'] = $lvl+1; } echo html_buildlist($data, 'idx', 'html_list_index', 'html_li_index'); } /** * List matching namespaces and pages for the link wizard * * @author Andreas Gohr <[email protected]> */ function ajax_linkwiz(){ global $conf; global $lang; global $INPUT; $q = ltrim(trim($INPUT->post->str('q')),':'); $id = noNS($q); $ns = getNS($q); $ns = cleanID($ns); $id = cleanID($id); $nsd = utf8_encodeFN(str_replace(':','/',$ns)); $data = array(); if($q && !$ns){ // use index to lookup matching pages $pages = ft_pageLookup($id,true); // result contains matches in pages and namespaces // we now extract the matching namespaces to show // them seperately $dirs = array(); foreach($pages as $pid => $title){ if(strpos(noNS($pid),$id) === false){ // match was in the namespace $dirs[getNS($pid)] = 1; // assoc array avoids dupes }else{ // it is a matching page, add it to the result $data[] = array( 'id' => $pid, 'title' => $title, 'type' => 'f', ); } unset($pages[$pid]); } foreach($dirs as $dir => $junk){ $data[] = array( 'id' => $dir, 'type' => 'd', ); } }else{ $opts = array( 'depth' => 1, 'listfiles' => true, 'listdirs' => true, 'pagesonly' => true, 'firsthead' => true, 'sneakyacl' => $conf['sneaky_index'], ); if($id) $opts['filematch'] = '^.*\/'.$id; if($id) $opts['dirmatch'] = '^.*\/'.$id; 
search($data,$conf['datadir'],'search_universal',$opts,$nsd); // add back to upper if($ns){ array_unshift($data,array( 'id' => getNS($ns), 'type' => 'u', )); } } // fixme sort results in a useful way ? if(!count($data)){ echo $lang['nothingfound']; exit; } // output the found data $even = 1; foreach($data as $item){ $even *= -1; //zebra if(($item['type'] == 'd' || $item['type'] == 'u') && $item['id']) $item['id'] .= ':'; $link = wl($item['id']); echo '<div class="'.(($even > 0)?'even':'odd').' type_'.$item['type'].'">'; if($item['type'] == 'u'){ $name = $lang['upperns']; }else{ $name = htmlspecialchars($item['id']); } echo '<a href="'.$link.'" title="'.htmlspecialchars($item['id']).'" class="wikilink1">'.$name.'</a>'; if(!blank($item['title'])){ echo '<span>'.htmlspecialchars($item['title']).'</span>'; } echo '</div>'; } } //Setup VIM: ex: et ts=2 :
gpl-2.0
samurai0000000/qemu
android/base/containers/PodVector_unittest.cpp
3387
// Copyright 2014 The Android Open Source Project // // This software is licensed under the terms of the GNU General Public // License version 2, as published by the Free Software Foundation, and // may be copied, distributed, and modified under those terms. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. #include "android/base/containers/PodVector.h" #include <gtest/gtest.h> namespace android { namespace base { static int hashIndex(size_t n) { return static_cast<int>(((n >> 14) * 13773) + (n * 51)); } TEST(PodVector, Empty) { PodVector<int> v; EXPECT_TRUE(v.empty()); EXPECT_EQ(0U, v.size()); } TEST(PodVector, AppendOneItem) { PodVector<int> v; v.append(10234); EXPECT_FALSE(v.empty()); EXPECT_EQ(1U, v.size()); EXPECT_EQ(10234, v[0]); } TEST(PodVector, AppendLotsOfItems) { PodVector<int> v; const size_t kMaxCount = 10000; for (size_t n = 0; n < kMaxCount; ++n) { v.append(hashIndex(n)); } EXPECT_EQ(kMaxCount, v.size()); for (size_t n = 0; n < kMaxCount; ++n) { EXPECT_EQ(hashIndex(n), v[n]) << "At index " << n; } } TEST(PodVector, RemoveFrontItems) { PodVector<int> v; const size_t kMaxCount = 100; for (size_t n = 0; n < kMaxCount; ++n) { v.append(hashIndex(n)); } EXPECT_EQ(kMaxCount, v.size()); for (size_t n = 0; n < kMaxCount; ++n) { EXPECT_EQ(hashIndex(n), v[0]) << "At index " << n; v.remove(0U); EXPECT_EQ(kMaxCount - n - 1U, v.size()) << "At index " << n; } } TEST(PodVector, PrependItems) { PodVector<int> v; const size_t kMaxCount = 100; for (size_t n = 0; n < kMaxCount; ++n) { v.prepend(hashIndex(n)); } EXPECT_EQ(kMaxCount, v.size()); for (size_t n = 0; n < kMaxCount; ++n) { EXPECT_EQ(hashIndex(kMaxCount - n - 1), v[n]) << "At index " << n; } } TEST(PodVector, ResizeExpands) { PodVector<int> v; const size_t kMaxCount = 100; const size_t kMaxCount2 = 10000; for (size_t 
n = 0; n < kMaxCount; ++n) { v.append(hashIndex(n)); } EXPECT_EQ(kMaxCount, v.size()); v.resize(kMaxCount2); EXPECT_EQ(kMaxCount2, v.size()); for (size_t n = 0; n < kMaxCount; ++n) { EXPECT_EQ(hashIndex(n), v[n]) << "At index " << n; } } TEST(PodVector, ResizeTruncates) { PodVector<int> v; const size_t kMaxCount = 10000; const size_t kMaxCount2 = 10; for (size_t n = 0; n < kMaxCount; ++n) { v.append(hashIndex(n)); } EXPECT_EQ(kMaxCount, v.size()); v.resize(kMaxCount2); EXPECT_EQ(kMaxCount2, v.size()); for (size_t n = 0; n < kMaxCount2; ++n) { EXPECT_EQ(hashIndex(n), v[n]) << "At index " << n; } } TEST(PodVector, AssignmentOperator) { PodVector<int> v1; const size_t kMaxCount = 10000; for (size_t n = 0; n < kMaxCount; ++n) { v1.append(hashIndex(n)); } EXPECT_EQ(kMaxCount, v1.size()); PodVector<int> v2; v2 = v1; v1.reserve(0); EXPECT_EQ(kMaxCount, v2.size()); for (size_t n = 0; n < kMaxCount; ++n) { EXPECT_EQ(hashIndex(n), v2[n]) << "At index " << n; } } } // namespace base } // namespace android
gpl-2.0
fwartner/paste
assets/ace/src/mode-makefile.js
11458
define("ace/mode/sh_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"], function(require, exports, module) { "use strict"; var oop = require("../lib/oop"); var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules; var reservedKeywords = exports.reservedKeywords = ( '!|{|}|case|do|done|elif|else|'+ 'esac|fi|for|if|in|then|until|while|'+ '&|;|export|local|read|typeset|unset|'+ 'elif|select|set' ); var languageConstructs = exports.languageConstructs = ( '[|]|alias|bg|bind|break|builtin|'+ 'cd|command|compgen|complete|continue|'+ 'dirs|disown|echo|enable|eval|exec|'+ 'exit|fc|fg|getopts|hash|help|history|'+ 'jobs|kill|let|logout|popd|printf|pushd|'+ 'pwd|return|set|shift|shopt|source|'+ 'suspend|test|times|trap|type|ulimit|'+ 'umask|unalias|wait' ); var ShHighlightRules = function() { var keywordMapper = this.createKeywordMapper({ "keyword": reservedKeywords, "support.function.builtin": languageConstructs, "invalid.deprecated": "debugger" }, "identifier"); var integer = "(?:(?:[1-9]\\d*)|(?:0))"; var fraction = "(?:\\.\\d+)"; var intPart = "(?:\\d+)"; var pointFloat = "(?:(?:" + intPart + "?" 
+ fraction + ")|(?:" + intPart + "\\.))"; var exponentFloat = "(?:(?:" + pointFloat + "|" + intPart + ")" + ")"; var floatNumber = "(?:" + exponentFloat + "|" + pointFloat + ")"; var fileDescriptor = "(?:&" + intPart + ")"; var variableName = "[a-zA-Z_][a-zA-Z0-9_]*"; var variable = "(?:(?:\\$" + variableName + ")|(?:" + variableName + "=))"; var builtinVariable = "(?:\\$(?:SHLVL|\\$|\\!|\\?))"; var func = "(?:" + variableName + "\\s*\\(\\))"; this.$rules = { "start" : [{ token : "constant", regex : /\\./ }, { token : ["text", "comment"], regex : /(^|\s)(#.*)$/ }, { token : "string", regex : '"', push : [{ token : "constant.language.escape", regex : /\\(?:[$abeEfnrtv\\'"]|x[a-fA-F\d]{1,2}|u[a-fA-F\d]{4}([a-fA-F\d]{4})?|c.|\d{1,3})/ }, { token : "constant", regex : /\$\w+/ }, { token : "string", regex : '"', next: "pop" }, { defaultToken: "string" }] }, { stateName: "heredoc", onMatch : function(value, currentState, stack) { var next = value[2] == '-' ? "indentedHeredoc" : "heredoc"; var tokens = value.split(this.splitRegex); stack.push(next, tokens[4]); return [ {type:"constant", value: tokens[1]}, {type:"text", value: tokens[2]}, {type:"string", value: tokens[3]}, {type:"support.class", value: tokens[4]}, {type:"string", value: tokens[5]} ]; }, regex : "(<<-?)(\\s*)(['\"`]?)([\\w\-]+)(['\"`]?)", rules: { heredoc: [{ onMatch: function(value, currentState, stack) { if (value === stack[1]) { stack.shift(); stack.shift(); this.next = stack[0] || "start"; return "support.class"; } this.next = ""; return "string"; }, regex: ".*$", next: "start" }], indentedHeredoc: [{ token: "string", regex: "^ +" }, { onMatch: function(value, currentState, stack) { if (value === stack[1]) { stack.shift(); stack.shift(); this.next = stack[0] || "start"; return "support.class"; } this.next = ""; return "string"; }, regex: ".*$", next: "start" }] } }, { regex : "$", token : "empty", next : function(currentState, stack) { if (stack[0] === "heredoc" || stack[0] === "indentedHeredoc") return 
stack[0]; return currentState; } }, { token : "variable.language", regex : builtinVariable }, { token : "variable", regex : variable }, { token : "support.function", regex : func }, { token : "support.function", regex : fileDescriptor }, { token : "string", // ' string start : "'", end : "'" }, { token : "constant.numeric", // float regex : floatNumber }, { token : "constant.numeric", // integer regex : integer + "\\b" }, { token : keywordMapper, regex : "[a-zA-Z_$][a-zA-Z0-9_$]*\\b" }, { token : "keyword.operator", regex : "\\+|\\-|\\*|\\*\\*|\\/|\\/\\/|~|<|>|<=|=>|=|!=" }, { token : "paren.lparen", regex : "[\\[\\(\\{]" }, { token : "paren.rparen", regex : "[\\]\\)\\}]" } ] }; this.normalizeRules(); }; oop.inherits(ShHighlightRules, TextHighlightRules); exports.ShHighlightRules = ShHighlightRules; }); define("ace/mode/makefile_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules","ace/mode/sh_highlight_rules"], function(require, exports, module) { "use strict"; var oop = require("../lib/oop"); var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules; var ShHighlightFile = require("./sh_highlight_rules"); var MakefileHighlightRules = function() { var keywordMapper = this.createKeywordMapper({ "keyword": ShHighlightFile.reservedKeywords, "support.function.builtin": ShHighlightFile.languageConstructs, "invalid.deprecated": "debugger" }, "string"); this.$rules = { "start": [ { token: "string.interpolated.backtick.makefile", regex: "`", next: "shell-start" }, { token: "punctuation.definition.comment.makefile", regex: /#(?=.)/, next: "comment" }, { token: [ "keyword.control.makefile"], regex: "^(?:\\s*\\b)(\\-??include|ifeq|ifneq|ifdef|ifndef|else|endif|vpath|export|unexport|define|endef|override)(?:\\b)" }, {// ^([^\t ]+(\s[^\t ]+)*:(?!\=))\s*.* token: ["entity.name.function.makefile", "text"], regex: "^([^\\t ]+(?:\\s[^\\t ]+)*:)(\\s*.*)" } ], "comment": [ { token : "punctuation.definition.comment.makefile", 
regex : /.+\\/ }, { token : "punctuation.definition.comment.makefile", regex : ".+", next : "start" } ], "shell-start": [ { token: keywordMapper, regex : "[a-zA-Z_$][a-zA-Z0-9_$]*\\b" }, { token: "string", regex : "\\w+" }, { token : "string.interpolated.backtick.makefile", regex : "`", next : "start" } ] } }; oop.inherits(MakefileHighlightRules, TextHighlightRules); exports.MakefileHighlightRules = MakefileHighlightRules; }); define("ace/mode/folding/coffee",["require","exports","module","ace/lib/oop","ace/mode/folding/fold_mode","ace/range"], function(require, exports, module) { "use strict"; var oop = require("../../lib/oop"); var BaseFoldMode = require("./fold_mode").FoldMode; var Range = require("../../range").Range; var FoldMode = exports.FoldMode = function() {}; oop.inherits(FoldMode, BaseFoldMode); (function() { this.getFoldWidgetRange = function(session, foldStyle, row) { var range = this.indentationBlock(session, row); if (range) return range; var re = /\S/; var line = session.getLine(row); var startLevel = line.search(re); if (startLevel == -1 || line[startLevel] != "#") return; var startColumn = line.length; var maxRow = session.getLength(); var startRow = row; var endRow = row; while (++row < maxRow) { line = session.getLine(row); var level = line.search(re); if (level == -1) continue; if (line[level] != "#") break; endRow = row; } if (endRow > startRow) { var endColumn = session.getLine(endRow).length; return new Range(startRow, startColumn, endRow, endColumn); } }; this.getFoldWidget = function(session, foldStyle, row) { var line = session.getLine(row); var indent = line.search(/\S/); var next = session.getLine(row + 1); var prev = session.getLine(row - 1); var prevIndent = prev.search(/\S/); var nextIndent = next.search(/\S/); if (indent == -1) { session.foldWidgets[row - 1] = prevIndent!= -1 && prevIndent < nextIndent ? 
"start" : ""; return ""; } if (prevIndent == -1) { if (indent == nextIndent && line[indent] == "#" && next[indent] == "#") { session.foldWidgets[row - 1] = ""; session.foldWidgets[row + 1] = ""; return "start"; } } else if (prevIndent == indent && line[indent] == "#" && prev[indent] == "#") { if (session.getLine(row - 2).search(/\S/) == -1) { session.foldWidgets[row - 1] = "start"; session.foldWidgets[row + 1] = ""; return ""; } } if (prevIndent!= -1 && prevIndent < indent) session.foldWidgets[row - 1] = "start"; else session.foldWidgets[row - 1] = ""; if (indent < nextIndent) return "start"; else return ""; }; }).call(FoldMode.prototype); }); define("ace/mode/makefile",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/makefile_highlight_rules","ace/mode/folding/coffee"], function(require, exports, module) { "use strict"; var oop = require("../lib/oop"); var TextMode = require("./text").Mode; var MakefileHighlightRules = require("./makefile_highlight_rules").MakefileHighlightRules; var FoldMode = require("./folding/coffee").FoldMode; var Mode = function() { this.HighlightRules = MakefileHighlightRules; this.foldingRules = new FoldMode(); }; oop.inherits(Mode, TextMode); (function() { this.lineCommentStart = "#"; this.$indentWithTabs = true; this.$id = "ace/mode/makefile"; }).call(Mode.prototype); exports.Mode = Mode; });
gpl-2.0
summerpulse/openjdk7
hotspot/src/share/vm/oops/typeArrayKlass.hpp
3917
/*
 * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_OOPS_TYPEARRAYKLASS_HPP
#define SHARE_VM_OOPS_TYPEARRAYKLASS_HPP

#include "oops/arrayKlass.hpp"

// A typeArrayKlass is the klass of a typeArray
// It contains the type and size of the elements
// NOTE(review): TRAPS / CHECK_NULL below are HotSpot's exception-propagation
// macros; THREAD is the implicit current-thread argument they expand around.

class typeArrayKlass : public arrayKlass {
  friend class VMStructs;
 private:
  jint _max_length;                  // maximum number of elements allowed in an array
 public:
  // instance variables
  jint max_length()                  { return _max_length; }
  void set_max_length(jint m)        { _max_length = m; }

  // testers
  bool oop_is_typeArray_slow() const { return true; }

  // klass allocation
  DEFINE_ALLOCATE_PERMANENT(typeArrayKlass);
  static klassOop create_klass(BasicType type, int scale, const char* name_str,
                               TRAPS);
  // Convenience overload: derives the klass name from the element type.
  static inline klassOop create_klass(BasicType type, int scale, TRAPS) {
    return create_klass(type, scale, external_name(type), CHECK_NULL);
  }

  int oop_size(oop obj) const;
  int klass_oop_size() const         { return object_size(); }

  bool compute_is_subtype_of(klassOop k);

  // Allocation
  // do_zero controls zero-filling of the new element storage — TODO confirm
  // against typeArrayKlass.cpp.
  typeArrayOop allocate_common(int length, bool do_zero, TRAPS);
  typeArrayOop allocate(int length, TRAPS) { return allocate_common(length, true, THREAD); }
  typeArrayOop allocate_permanent(int length, TRAPS);  // used for class file structures
  oop multi_allocate(int rank, jint* sizes, TRAPS);

  // Copying
  void copy_array(arrayOop s, int src_pos, arrayOop d, int dst_pos, int length, TRAPS);

  // Iteration
  int oop_oop_iterate(oop obj, OopClosure* blk);
  int oop_oop_iterate_m(oop obj, OopClosure* blk, MemRegion mr);

  // Garbage collection
  void oop_follow_contents(oop obj);
  int  oop_adjust_pointers(oop obj);

  // Parallel Scavenge and Parallel Old
  PARALLEL_GC_DECLS

 protected:
  // Find n'th dimensional array
  virtual klassOop array_klass_impl(bool or_null, int n, TRAPS);

  // Returns the array class with this class as element type
  virtual klassOop array_klass_impl(bool or_null, TRAPS);

 public:
  // Casting from klassOop
  static typeArrayKlass* cast(klassOop k) {
    assert(k->klass_part()->oop_is_typeArray_slow(), "cast to typeArrayKlass");
    return (typeArrayKlass*) k->klass_part();
  }

  // Naming
  static const char* external_name(BasicType type);

  // Sizing
  // Header size is measured in HeapWords, matching the rest of the oop layout.
  static int header_size() { return oopDesc::header_size() + sizeof(typeArrayKlass)/HeapWordSize; }
  int object_size() const  { return arrayKlass::object_size(header_size()); }

  // Initialization (virtual from Klass)
  void initialize(TRAPS);

 private:
  // Helpers
  static klassOop array_klass_impl(typeArrayKlassHandle h_this, bool or_null, int n, TRAPS);

#ifndef PRODUCT
 public:
  // Printing
  void oop_print_on(oop obj, outputStream* st);
#endif

 public:
  const char* internal_name() const;
};

#endif // SHARE_VM_OOPS_TYPEARRAYKLASS_HPP
gpl-2.0
Qalthos/ansible
lib/ansible/modules/network/aci/mso_schema_site_anp_epg.py
6362
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright: (c) 2019, Dag Wieers (@dagwieers) <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = r'''
---
module: mso_schema_site_anp_epg
short_description: Manage site-local Endpoint Groups (EPGs) in schema template
description:
- Manage site-local EPGs in schema template on Cisco ACI Multi-Site.
author:
- Dag Wieers (@dagwieers)
version_added: '2.8'
options:
  schema:
    description:
    - The name of the schema.
    type: str
    required: yes
  site:
    description:
    - The name of the site.
    type: str
    required: yes
  template:
    description:
    - The name of the template.
    type: str
    required: yes
  anp:
    description:
    - The name of the ANP.
    type: str
  epg:
    description:
    - The name of the EPG to manage.
    type: str
    aliases: [ name ]
  state:
    description:
    - Use C(present) or C(absent) for adding or removing.
    - Use C(query) for listing an object or multiple objects.
    type: str
    choices: [ absent, present, query ]
    default: present
seealso:
- module: mso_schema_site_anp
- module: mso_schema_site_anp_epg_subnet
- module: mso_schema_template_anp_epg
extends_documentation_fragment: mso
'''

EXAMPLES = r'''
- name: Add a new site EPG
  mso_schema_site_anp_epg:
    host: mso_host
    username: admin
    password: SomeSecretPassword
    schema: Schema1
    site: Site1
    template: Template1
    anp: ANP1
    epg: EPG1
    state: present
  delegate_to: localhost

- name: Remove a site EPG
  mso_schema_site_anp_epg:
    host: mso_host
    username: admin
    password: SomeSecretPassword
    schema: Schema1
    site: Site1
    template: Template1
    anp: ANP1
    epg: EPG1
    state: absent
  delegate_to: localhost

- name: Query a specific site EPGs
  mso_schema_site_anp_epg:
    host: mso_host
    username: admin
    password: SomeSecretPassword
    schema: Schema1
    site: Site1
    template: Template1
    anp: ANP1
    epg: EPG1
    state: query
  delegate_to: localhost
  register: query_result

- name: Query all site EPGs
  mso_schema_site_anp_epg:
    host: mso_host
    username: admin
    password: SomeSecretPassword
    schema: Schema1
    site: Site1
    template: Template1
    anp: ANP1
    state: query
  delegate_to: localhost
  register: query_result
'''

RETURN = r'''
'''

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.mso import MSOModule, mso_argument_spec, issubset


def main():
    # Build the argument spec on top of the shared MSO connection options.
    argument_spec = mso_argument_spec()
    argument_spec.update(
        schema=dict(type='str', required=True),
        site=dict(type='str', required=True),
        template=dict(type='str', required=True),
        anp=dict(type='str', required=True),
        epg=dict(type='str', aliases=['name']),  # This parameter is not required for querying all objects
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'absent', ['epg']],
            ['state', 'present', ['epg']],
        ],
    )

    schema = module.params['schema']
    site = module.params['site']
    template = module.params['template']
    anp = module.params['anp']
    epg = module.params['epg']
    state = module.params['state']

    mso = MSOModule(module)

    # Get schema_id
    schema_obj = mso.get_obj('schemas', displayName=schema)
    if not schema_obj:
        mso.fail_json(msg="Provided schema '{0}' does not exist".format(schema))

    schema_path = 'schemas/{id}'.format(**schema_obj)
    schema_id = schema_obj['id']

    # Get site
    site_id = mso.lookup_site(site)

    # Validate that the (site, template) pair is attached to this schema.
    sites = [(s['siteId'], s['templateName']) for s in schema_obj['sites']]
    if (site_id, template) not in sites:
        # BUG FIX: 'sites' holds (site_id, template) tuples; str.join() requires
        # an iterable of strings, so the previous ', '.join(sites) raised
        # TypeError instead of producing the intended error message.
        mso.fail_json(msg="Provided site/template '{0}-{1}' does not exist. Existing sites/templates: {2}".format(
            site, template, ', '.join('{0}-{1}'.format(*s) for s in sites)))

    # Schema-access uses indexes
    site_idx = sites.index((site_id, template))
    # Path-based access uses site_id-template
    site_template = '{0}-{1}'.format(site_id, template)

    # Get ANP
    anp_ref = mso.anp_ref(schema_id=schema_id, template=template, anp=anp)
    anps = [a['anpRef'] for a in schema_obj['sites'][site_idx]['anps']]
    if anp_ref not in anps:
        mso.fail_json(msg="Provided anp '{0}' does not exist. Existing anps: {1}".format(anp, ', '.join(anps)))
    anp_idx = anps.index(anp_ref)

    # Get EPG
    epg_ref = mso.epg_ref(schema_id=schema_id, template=template, anp=anp, epg=epg)
    epgs = [e['epgRef'] for e in schema_obj['sites'][site_idx]['anps'][anp_idx]['epgs']]
    if epg is not None and epg_ref in epgs:
        epg_idx = epgs.index(epg_ref)
        epg_path = '/sites/{0}/anps/{1}/epgs/{2}'.format(site_template, anp, epg)
        mso.existing = schema_obj['sites'][site_idx]['anps'][anp_idx]['epgs'][epg_idx]

    if state == 'query':
        # With no 'epg' given, return every site-local EPG under the ANP.
        if epg is None:
            mso.existing = schema_obj['sites'][site_idx]['anps'][anp_idx]['epgs']
        elif not mso.existing:
            mso.fail_json(msg="EPG '{epg}' not found".format(epg=epg))
        mso.exit_json()

    epgs_path = '/sites/{0}/anps/{1}/epgs'.format(site_template, anp)
    ops = []

    mso.previous = mso.existing
    if state == 'absent':
        # Only issue a remove op when the EPG actually exists.
        if mso.existing:
            mso.sent = mso.existing = {}
            ops.append(dict(op='remove', path=epg_path))

    elif state == 'present':

        payload = dict(
            epgRef=dict(
                schemaId=schema_id,
                templateName=template,
                anpName=anp,
                epgName=epg,
            ),
        )

        mso.sanitize(payload, collate=True)

        # Only add the EPG when it is not already present on the site.
        if not mso.existing:
            ops.append(dict(op='add', path=epgs_path + '/-', value=mso.sent))

        mso.existing = mso.proposed

    # Apply the accumulated JSON-patch operations unless running in check mode.
    if not module.check_mode:
        mso.request(schema_path, method='PATCH', data=ops)

    mso.exit_json()


if __name__ == "__main__":
    main()
gpl-3.0
sammarshallou/moodle
lib/yui/3.2.0/build/datatype/lang/datatype-date_hi-IN.js
1205
/* Copyright (c) 2010, Yahoo! Inc. All rights reserved. Code licensed under the BSD License: http://developer.yahoo.com/yui/license.html version: 3.2.0 build: 2676 */ YUI.add("lang/datatype-date-format_hi-IN",function(A){A.Intl.add("datatype-date-format","hi-IN",{"a":["रवि","सोम","मंगल","बुध","गुरु","शुक्र","शनि"],"A":["रविवार","सोमवार","मंगलवार","बुधवार","गुरुवार","शुक्रवार","शनिवार"],"b":["जनवरी","फरवरी","मार्च","अप्रैल","मई","जून","जुलाई","अगस्त","सितम्बर","अक्तूबर","नवम्बर","दिसम्बर"],"B":["जनवरी","फरवरी","मार्च","अप्रैल","मई","जून","जुलाई","अगस्त","सितम्बर","अक्तूबर","नवम्बर","दिसम्बर"],"c":"%a, %d %b %Y %l:%M:%S %p %Z","p":["AM","PM"],"P":["am","pm"],"x":"%d-%m-%y","X":"%l:%M:%S %p"});},"3.2.0");YUI.add("lang/datatype-date_hi-IN",function(A){},"3.2.0",{use:["lang/datatype-date-format_hi-IN"]});
gpl-3.0
BuchuBaron/d8-bootstrap-template
profiles/badger/modules/paragraphs/src/Tests/ParagraphsWidgetButtonsTest.php
5626
<?php

namespace Drupal\paragraphs\Tests;

use Drupal\field_ui\Tests\FieldUiTestTrait;

/**
 * Tests paragraphs widget buttons.
 *
 * Exercises the Open/Closed/Preview edit modes as well as the
 * remove/restore/confirm-remove workflow of the paragraphs widget.
 *
 * @group paragraphs
 */
class ParagraphsWidgetButtonsTest extends ParagraphsTestBase {

  use FieldUiTestTrait;

  /**
   * Tests the widget buttons of paragraphs.
   */
  public function testWidgetButtons() {
    $this->addParagraphedContentType('paragraphed_test', 'field_paragraphs');

    $this->loginAsAdmin(['create paragraphed_test content', 'edit any paragraphed_test content']);
    // Add a Paragraph type.
    $paragraph_type = 'text_paragraph';
    $this->addParagraphsType($paragraph_type);
    // Add a text field to the text_paragraph type.
    static::fieldUIAddNewField('admin/structure/paragraphs_type/' . $paragraph_type, 'text', 'Text', 'text_long', [], []);
    $this->drupalPostAjaxForm('node/add/paragraphed_test', [], 'field_paragraphs_text_paragraph_add_more');
    // Create a node with a Paragraph.
    $text = 'recognizable_text';
    $edit = [
      'title[0][value]' => 'paragraphs_mode_test',
      'field_paragraphs[0][subform][field_text][0][value]' => $text,
    ];
    $this->drupalPostForm(NULL, $edit, t('Save and publish'));
    $node = $this->drupalGetNodeByTitle('paragraphs_mode_test');
    // Test the 'Open' mode: the subform field is editable straight away.
    $this->drupalGet('node/' . $node->id() . '/edit');
    $this->assertFieldByName('field_paragraphs[0][subform][field_text][0][value]', $text);
    $this->drupalPostForm(NULL, [], t('Save and keep published'));
    $this->assertText($text);
    // Test the 'Closed' mode: the subform only appears after clicking "Edit".
    $this->setParagraphsWidgetMode('paragraphed_test', 'field_paragraphs', 'closed');
    $this->drupalGet('node/' . $node->id() . '/edit');
    // Click "Edit" button.
    $this->drupalPostAjaxForm(NULL, [], 'field_paragraphs_0_edit');
    $this->assertFieldByName('field_paragraphs[0][subform][field_text][0][value]', $text);
    $closed_mode_text = 'closed_mode_text';
    $edit = ['field_paragraphs[0][subform][field_text][0][value]' => $closed_mode_text];
    // Click "Collapse" button.
    $this->drupalPostAjaxForm(NULL, $edit, 'field_paragraphs_0_collapse');
    // A collapsed edit is pending until the node itself is saved.
    $this->assertText('Warning: this content must be saved to reflect changes on this Paragraph item.');
    $this->assertNoText($closed_mode_text);
    $this->drupalPostForm(NULL, [], t('Save and keep published'));
    $this->assertText('paragraphed_test ' . $node->label() . ' has been updated.');
    $this->assertText($closed_mode_text);
    // Test the 'Preview' mode: collapsed items show their rendered preview.
    $this->setParagraphsWidgetMode('paragraphed_test', 'field_paragraphs', 'preview');
    $this->drupalGet('node/' . $node->id() . '/edit');
    // Click "Edit" button.
    $this->drupalPostAjaxForm(NULL, [], 'field_paragraphs_0_edit');
    $this->assertFieldByName('field_paragraphs[0][subform][field_text][0][value]', $closed_mode_text);
    $preview_mode_text = 'preview_mode_text';
    $edit = ['field_paragraphs[0][subform][field_text][0][value]' => $preview_mode_text];
    // Click "Collapse" button.
    $this->drupalPostAjaxForm(NULL, $edit, 'field_paragraphs_0_collapse');
    $this->assertText('Warning: this content must be saved to reflect changes on this Paragraph item.');
    $this->assertText($preview_mode_text);
    $this->drupalPostForm(NULL, [], t('Save and keep published'));
    $this->assertText('paragraphed_test ' . $node->label() . ' has been updated.');
    $this->assertText($preview_mode_text);
    // Test the remove/restore function.
    $this->drupalGet('node/' . $node->id() . '/edit');
    $this->assertText($preview_mode_text);
    // Click "Remove" button.
    $this->drupalPostAjaxForm(NULL, [], 'field_paragraphs_0_remove');
    $this->assertText('Deleted Paragraph: text_paragraph');
    // Click "Restore" button.
    $this->drupalPostAjaxForm(NULL, [], 'field_paragraphs_0_restore');
    $this->assertFieldByName('field_paragraphs[0][subform][field_text][0][value]', $preview_mode_text);
    $restore_text = 'restore_text';
    $edit = ['field_paragraphs[0][subform][field_text][0][value]' => $restore_text];
    $this->drupalPostForm(NULL, $edit, t('Save and keep published'));
    $this->assertText('paragraphed_test ' . $node->label() . ' has been updated.');
    $this->assertText($restore_text);
    // Test the remove/confirm remove function.
    $this->drupalGet('node/' . $node->id() . '/edit');
    $this->assertText($restore_text);
    // Click "Remove" button.
    $this->drupalPostAjaxForm(NULL, [], 'field_paragraphs_0_remove');
    $this->assertText('Deleted Paragraph: text_paragraph');
    // Click "Confirm Removal" button.
    $this->drupalPostAjaxForm(NULL, [], 'field_paragraphs_0_confirm_remove');
    $this->drupalPostForm(NULL, [], t('Save and keep published'));
    $this->assertText('paragraphed_test ' . $node->label() . ' has been updated.');
    $this->assertNoText($restore_text);
  }

  /**
   * Sets the Paragraphs widget display mode.
   *
   * Opens the form-display settings of the given field and switches its
   * edit_mode setting via the settings-edit AJAX form.
   *
   * @param string $content_type
   *   Content type name where to set the widget mode.
   * @param string $paragraphs_field
   *   Paragraphs field to change the mode.
   * @param string $mode
   *   Mode to be set. ('closed', 'preview' or 'open').
   */
  protected function setParagraphsWidgetMode($content_type, $paragraphs_field, $mode) {
    $this->drupalGet('admin/structure/types/manage/' . $content_type . '/form-display');
    $this->drupalPostAjaxForm(NULL, [], $paragraphs_field . '_settings_edit');
    $this->drupalPostForm(NULL, ['fields[' . $paragraphs_field . '][settings_edit_form][settings][edit_mode]' => $mode], t('Update'));
    $this->drupalPostForm(NULL, [], 'Save');
  }

}
gpl-3.0
zcopley/StatusNet
actions/apitimelineretweetedbyme.php
2435
<?php
/**
 * StatusNet, the distributed open-source microblogging tool
 *
 * Show authenticating user's most recent repeats
 *
 * PHP version 5
 *
 * LICENCE: This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *
 * @category  API
 * @package   StatusNet
 * @author    Evan Prodromou <[email protected]>
 * @copyright 2009 StatusNet, Inc.
 * @license   http://www.fsf.org/licensing/licenses/agpl-3.0.html GNU Affero General Public License version 3.0
 * @link      http://status.net/
 */

if (!defined('STATUSNET')) {
    exit(1);
}

require_once INSTALLDIR . '/lib/apiauth.php';
require_once INSTALLDIR . '/lib/mediafile.php';

/**
 * Show authenticating user's most recent repeats
 *
 * This endpoint is a placeholder: prepare() always responds with a 503
 * "Unimplemented" server error, so no timeline is ever produced. The
 * constants and properties below mirror the other API timeline actions
 * (paging limits, since_id/max_id cursors) but are never used here.
 *
 * @category API
 * @package  StatusNet
 * @author   Evan Prodromou <[email protected]>
 * @license  http://www.fsf.org/licensing/licenses/agpl-3.0.html GNU Affero General Public License version 3.0
 * @link     http://status.net/
 */
class ApiTimelineRetweetedByMeAction extends ApiAuthAction
{
    const DEFAULTCOUNT = 20;
    const MAXCOUNT     = 200;
    const MAXNOTICES   = 3200;

    // Declared for parity with sibling timeline actions; unused while the
    // method remains unimplemented.
    var $repeats  = null;
    var $cnt      = self::DEFAULTCOUNT;
    var $page     = 1;
    var $since_id = null;
    var $max_id   = null;

    /**
     * Take arguments for running
     *
     * Always fails with HTTP 503 because the method is not implemented.
     *
     * @param array $args $_REQUEST args
     *
     * @return boolean success flag
     *
     */
    function prepare($args)
    {
        parent::prepare($args);
        // TRANS: Server error displayed calling unimplemented API method for 'retweeted by me'.
        $this->serverError(_('Unimplemented.'), 503);
        // NOTE(review): serverError() appears to terminate the request, so
        // this return is presumably defensive — confirm against lib/action.php.
        return false;
    }

    /**
     * Return true if read only.
     *
     * @param array $args other arguments
     *
     * @return boolean is read only action?
     */
    function isReadOnly($args)
    {
        return true;
    }
}
agpl-3.0
liggitt/origin
vendor/k8s.io/kubernetes/cmd/kube-controller-manager/app/patch_gc.go
3029
package app import ( "k8s.io/kubernetes/cmd/kube-controller-manager/app/config" "k8s.io/kubernetes/pkg/apis/componentconfig" ) func applyOpenShiftGCConfig(controllerManager *config.Config) error { // TODO make this configurable or discoverable. This is going to prevent us from running the stock GC controller // IF YOU ADD ANYTHING TO THIS LIST, MAKE SURE THAT YOU UPDATE THEIR STRATEGIES TO PREVENT GC FINALIZERS controllerManager.ComponentConfig.GarbageCollectorController.GCIgnoredResources = append(controllerManager.ComponentConfig.GarbageCollectorController.GCIgnoredResources, // explicitly disabled from GC for now - not enough value to track them componentconfig.GroupResource{Group: "authorization.openshift.io", Resource: "rolebindingrestrictions"}, componentconfig.GroupResource{Group: "network.openshift.io", Resource: "clusternetworks"}, componentconfig.GroupResource{Group: "network.openshift.io", Resource: "egressnetworkpolicies"}, componentconfig.GroupResource{Group: "network.openshift.io", Resource: "hostsubnets"}, componentconfig.GroupResource{Group: "network.openshift.io", Resource: "netnamespaces"}, componentconfig.GroupResource{Group: "oauth.openshift.io", Resource: "oauthclientauthorizations"}, componentconfig.GroupResource{Group: "oauth.openshift.io", Resource: "oauthclients"}, componentconfig.GroupResource{Group: "quota.openshift.io", Resource: "clusterresourcequotas"}, componentconfig.GroupResource{Group: "user.openshift.io", Resource: "groups"}, componentconfig.GroupResource{Group: "user.openshift.io", Resource: "identities"}, componentconfig.GroupResource{Group: "user.openshift.io", Resource: "users"}, componentconfig.GroupResource{Group: "image.openshift.io", Resource: "images"}, // virtual resource componentconfig.GroupResource{Group: "project.openshift.io", Resource: "projects"}, // virtual and unwatchable resource, surfaced via rbac.authorization.k8s.io objects componentconfig.GroupResource{Group: "authorization.openshift.io", Resource: 
"clusterroles"}, componentconfig.GroupResource{Group: "authorization.openshift.io", Resource: "clusterrolebindings"}, componentconfig.GroupResource{Group: "authorization.openshift.io", Resource: "roles"}, componentconfig.GroupResource{Group: "authorization.openshift.io", Resource: "rolebindings"}, // these resources contain security information in their names, and we don't need to track them componentconfig.GroupResource{Group: "oauth.openshift.io", Resource: "oauthaccesstokens"}, componentconfig.GroupResource{Group: "oauth.openshift.io", Resource: "oauthauthorizetokens"}, // exposed already as extensions v1beta1 by other controllers componentconfig.GroupResource{Group: "apps", Resource: "deployments"}, // exposed as autoscaling v1 componentconfig.GroupResource{Group: "extensions", Resource: "horizontalpodautoscalers"}, // exposed as security.openshift.io v1 componentconfig.GroupResource{Group: "", Resource: "securitycontextconstraints"}, ) return nil }
apache-2.0
maxamillion/origin
vendor/github.com/influxdata/influxdb/cmd/influx_inspect/verify/verify.go
2696
package verify import ( "flag" "fmt" "hash/crc32" "io" "os" "path/filepath" "text/tabwriter" "time" "github.com/influxdata/influxdb/tsdb/engine/tsm1" ) // Command represents the program execution for "influx_inspect verify". type Command struct { Stderr io.Writer Stdout io.Writer } // NewCommand returns a new instance of Command. func NewCommand() *Command { return &Command{ Stderr: os.Stderr, Stdout: os.Stdout, } } // Run executes the command. func (cmd *Command) Run(args ...string) error { var path string fs := flag.NewFlagSet("verify", flag.ExitOnError) fs.StringVar(&path, "dir", os.Getenv("HOME")+"/.influxdb", "Root storage path. [$HOME/.influxdb]") fs.SetOutput(cmd.Stdout) fs.Usage = cmd.printUsage if err := fs.Parse(args); err != nil { return err } start := time.Now() dataPath := filepath.Join(path, "data") brokenBlocks := 0 totalBlocks := 0 // No need to do this in a loop ext := fmt.Sprintf(".%s", tsm1.TSMFileExtension) // Get all TSM files by walking through the data dir files := []string{} err := filepath.Walk(dataPath, func(path string, f os.FileInfo, err error) error { if err != nil { return err } if filepath.Ext(path) == ext { files = append(files, path) } return nil }) if err != nil { panic(err) } tw := tabwriter.NewWriter(cmd.Stdout, 16, 8, 0, '\t', 0) // Verify the checksums of every block in every file for _, f := range files { file, err := os.OpenFile(f, os.O_RDONLY, 0600) if err != nil { return err } reader, err := tsm1.NewTSMReader(file) if err != nil { return err } blockItr := reader.BlockIterator() brokenFileBlocks := 0 count := 0 for blockItr.Next() { totalBlocks++ key, _, _, checksum, buf, err := blockItr.Read() if err != nil { brokenBlocks++ fmt.Fprintf(tw, "%s: could not get checksum for key %v block %d due to error: %q\n", f, key, count, err) } else if expected := crc32.ChecksumIEEE(buf); checksum != expected { brokenBlocks++ fmt.Fprintf(tw, "%s: got %d but expected %d for key %v, block %d\n", f, checksum, expected, key, count) } count++ } 
if brokenFileBlocks == 0 { fmt.Fprintf(tw, "%s: healthy\n", f) } reader.Close() } fmt.Fprintf(tw, "Broken Blocks: %d / %d, in %vs\n", brokenBlocks, totalBlocks, time.Since(start).Seconds()) tw.Flush() return nil } // printUsage prints the usage message to STDERR. func (cmd *Command) printUsage() { usage := fmt.Sprintf(`Verifies the the checksum of shards. Usage: influx_inspect verify [flags] -dir <path> Root storage path Defaults to "%[1]s/.influxdb". `, os.Getenv("HOME")) fmt.Fprintf(cmd.Stdout, usage) }
apache-2.0
agrare/manageiq
spec/models/miq_widget/report_content_spec.rb
3355
RSpec.describe MiqWidget, "::ReportContent" do let(:vm_count) { 2 } let(:widget) do MiqWidget.sync_from_hash(YAML.load(" description: report_vendor_and_guest_os title: Vendor and Guest OS content_type: report options: :col_order: - name - vendor_display :row_count: #{vm_count} visibility: :roles: - _ALL_ resource_name: Vendor and Guest OS resource_type: MiqReport enabled: true read_only: true ")) end before do MiqReport.seed_report("Vendor and Guest OS") EvmSpecHelper.create_guid_miq_server_zone @admin = FactoryBot.create(:user_admin) @admin_group = @admin.current_group FactoryBot.create_list(:vm_vmware, vm_count) end it "#generate_one_content_for_user" do content = widget.generate_one_content_for_user(@admin_group, @admin) expect(content).to be_kind_of MiqWidgetContent expect(content.updated_at).to be_within(2.seconds).of(Time.now.utc) expect(content.contents.scan("</tr>").length).to eq(widget.options[:row_count] + 1) expect(content.contents.scan("</td>").length).to eq(widget.options[:row_count] * widget.options[:col_order].length) expect(content.contents.scan("</th>").length).to eq(widget.options[:col_order].length) expect(content.miq_report_result.html_rows(:offset => 0, :limit => 1).first.scan("</td>").length).to eq(widget.resource.col_order.length) expect(content.miq_report_result.html_rows.count { |c| c.match("<td>VMware</td>") }).to eq(vm_count) expect(content.contents).to match "<tr><th>Name</th><th>Container</th></tr>" expect(widget.contents_for_user(@admin)).to eq(content) end it "#generate_one_content_for_group" do content = widget.generate_one_content_for_group(@admin.current_group, @admin.get_timezone) expect(content).to be_kind_of MiqWidgetContent expect(content.updated_at).to be_within(2.seconds).of(Time.now.utc) expect(content.contents.scan("</tr>").length).to eq(widget.options[:row_count] + 1) expect(content.contents.scan("</td>").length).to eq(widget.options[:row_count] * widget.options[:col_order].length) 
expect(content.contents.scan("</th>").length).to eq(widget.options[:col_order].length) expect(content.miq_report_result.html_rows(:offset => 0, :limit => 1).first.scan("</td>").length).to eq(widget.resource.col_order.length) expect(content.miq_report_result.html_rows.count { |c| c.match("<td>VMware</td>") }).to eq(vm_count) expect(content.contents).to match "<tr><th>Name</th><th>Container</th></tr>" expect(widget.contents_for_user(@admin)).to eq(content) end it "#generate with self service user" do self_service_role = FactoryBot.create( :miq_user_role, :name => "ss_role", :settings => {:restrictions => {:vms => :user_or_group}} ) self_service_group = FactoryBot.create( :miq_group, :description => "EvmGroup-self_service", :miq_user_role => self_service_role ) user2 = FactoryBot.create(:user, :miq_groups => [self_service_group]) report = widget.generate_report(self_service_group, user2) content = MiqWidget::ReportContent.new(:report => report, :resource => widget.resource, :timezone => "UTC", :widget_options => widget.options) expect { content.generate(user2) }.not_to raise_error end end
apache-2.0
ayermac/cblog
thinkphp/library/think/Build.php
7591
<?php // +---------------------------------------------------------------------- // | ThinkPHP [ WE CAN DO IT JUST THINK ] // +---------------------------------------------------------------------- // | Copyright (c) 2006~2017 http://thinkphp.cn All rights reserved. // +---------------------------------------------------------------------- // | Licensed ( http://www.apache.org/licenses/LICENSE-2.0 ) // +---------------------------------------------------------------------- // | Author: liu21st <[email protected]> // +---------------------------------------------------------------------- namespace think; class Build { /** * 根据传入的build资料创建目录和文件 * @access protected * @param array $build build列表 * @param string $namespace 应用类库命名空间 * @param bool $suffix 类库后缀 * @return void */ public static function run(array $build = [], $namespace = 'app', $suffix = false) { // 锁定 $lockfile = APP_PATH . 'build.lock'; if (is_writable($lockfile)) { return; } elseif (!touch($lockfile)) { throw new Exception('应用目录[' . APP_PATH . ']不可写,目录无法自动生成!<BR>请手动生成项目目录~', 10006); } foreach ($build as $module => $list) { if ('__dir__' == $module) { // 创建目录列表 self::buildDir($list); } elseif ('__file__' == $module) { // 创建文件列表 self::buildFile($list); } else { // 创建模块 self::module($module, $list, $namespace, $suffix); } } // 解除锁定 unlink($lockfile); } /** * 创建目录 * @access protected * @param array $list 目录列表 * @return void */ protected static function buildDir($list) { foreach ($list as $dir) { if (!is_dir(APP_PATH . $dir)) { // 创建目录 mkdir(APP_PATH . $dir, 0755, true); } } } /** * 创建文件 * @access protected * @param array $list 文件列表 * @return void */ protected static function buildFile($list) { foreach ($list as $file) { if (!is_dir(APP_PATH . dirname($file))) { // 创建目录 mkdir(APP_PATH . dirname($file), 0755, true); } if (!is_file(APP_PATH . $file)) { file_put_contents(APP_PATH . $file, 'php' == pathinfo($file, PATHINFO_EXTENSION) ? 
"<?php\n" : ''); } } } /** * 创建模块 * @access public * @param string $module 模块名 * @param array $list build列表 * @param string $namespace 应用类库命名空间 * @param bool $suffix 类库后缀 * @return void */ public static function module($module = '', $list = [], $namespace = 'app', $suffix = false) { $module = $module ? $module : ''; if (!is_dir(APP_PATH . $module)) { // 创建模块目录 mkdir(APP_PATH . $module); } if (basename(RUNTIME_PATH) != $module) { // 创建配置文件和公共文件 self::buildCommon($module); // 创建模块的默认页面 self::buildHello($module, $namespace, $suffix); } if (empty($list)) { // 创建默认的模块目录和文件 $list = [ '__file__' => ['config.php', 'common.php'], '__dir__' => ['controller', 'model', 'view'], ]; } // 创建子目录和文件 foreach ($list as $path => $file) { $modulePath = APP_PATH . $module . DS; if ('__dir__' == $path) { // 生成子目录 foreach ($file as $dir) { self::checkDirBuild($modulePath . $dir); } } elseif ('__file__' == $path) { // 生成(空白)文件 foreach ($file as $name) { if (!is_file($modulePath . $name)) { file_put_contents($modulePath . $name, 'php' == pathinfo($name, PATHINFO_EXTENSION) ? "<?php\n" : ''); } } } else { // 生成相关MVC文件 foreach ($file as $val) { $val = trim($val); $filename = $modulePath . $path . DS . $val . ($suffix ? ucfirst($path) : '') . EXT; $space = $namespace . '\\' . ($module ? $module . '\\' : '') . $path; $class = $val . ($suffix ? ucfirst($path) : ''); switch ($path) { case 'controller': // 控制器 $content = "<?php\nnamespace {$space};\n\nclass {$class}\n{\n\n}"; break; case 'model': // 模型 $content = "<?php\nnamespace {$space};\n\nuse think\Model;\n\nclass {$class} extends Model\n{\n\n}"; break; case 'view': // 视图 $filename = $modulePath . $path . DS . $val . 
'.html'; self::checkDirBuild(dirname($filename)); $content = ''; break; default: // 其他文件 $content = "<?php\nnamespace {$space};\n\nclass {$class}\n{\n\n}"; } if (!is_file($filename)) { file_put_contents($filename, $content); } } } } } /** * 创建模块的欢迎页面 * @access public * @param string $module 模块名 * @param string $namespace 应用类库命名空间 * @param bool $suffix 类库后缀 * @return void */ protected static function buildHello($module, $namespace, $suffix = false) { $filename = APP_PATH . ($module ? $module . DS : '') . 'controller' . DS . 'Index' . ($suffix ? 'Controller' : '') . EXT; if (!is_file($filename)) { $content = file_get_contents(THINK_PATH . 'tpl' . DS . 'default_index.tpl'); $content = str_replace(['{$app}', '{$module}', '{layer}', '{$suffix}'], [$namespace, $module ? $module . '\\' : '', 'controller', $suffix ? 'Controller' : ''], $content); self::checkDirBuild(dirname($filename)); file_put_contents($filename, $content); } } /** * 创建模块的公共文件 * @access public * @param string $module 模块名 * @return void */ protected static function buildCommon($module) { $filename = CONF_PATH . ($module ? $module . DS : '') . 'config.php'; self::checkDirBuild(dirname($filename)); if (!is_file($filename)) { file_put_contents($filename, "<?php\n//配置文件\nreturn [\n\n];"); } $filename = APP_PATH . ($module ? $module . DS : '') . 'common.php'; if (!is_file($filename)) { file_put_contents($filename, "<?php\n"); } } protected static function checkDirBuild($dirname) { if (!is_dir($dirname)) { mkdir($dirname, 0755, true); } } }
apache-2.0
droyad/roslyn
src/Scripting/Test/ScriptEngine.cs
10239
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Diagnostics; using System.IO; using System.Reflection; using System.Runtime.InteropServices; using Microsoft.CodeAnalysis; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.Scripting { /// <summary> /// Represents a runtime execution context for C# scripts. /// </summary> internal abstract class ScriptEngine { public static readonly ImmutableArray<string> DefaultReferenceSearchPaths; // state captured by session at creation time: private ScriptOptions _options; private readonly ScriptBuilder _builder; static ScriptEngine() { DefaultReferenceSearchPaths = ImmutableArray.Create(RuntimeEnvironment.GetRuntimeDirectory()); } internal ScriptEngine(ScriptOptions options, MetadataFileReferenceProvider metadataReferenceProvider) { _options = options; if (metadataReferenceProvider == null) { metadataReferenceProvider = _options.AssemblyResolver.Provider; } _builder = new ScriptBuilder(); _options = _options.WithReferenceProvider(metadataReferenceProvider); string initialBaseDirectory; try { initialBaseDirectory = Directory.GetCurrentDirectory(); } catch { initialBaseDirectory = null; } _options = _options.WithBaseDirectory(initialBaseDirectory); } public MetadataFileReferenceProvider MetadataReferenceProvider { get { return _options.AssemblyResolver.Provider; } } internal ScriptBuilder Builder { get { return _builder; } } // TODO (tomat): Consider exposing FileResolver and removing BaseDirectory. 
// We would need WithAssemblySearchPaths on FileResolver to implement SetReferenceSearchPaths internal MetadataFileReferenceResolver MetadataReferenceResolver { get { return _options.AssemblyResolver.PathResolver; } // for testing set { Debug.Assert(value != null); _options = _options.WithReferenceResolver(value); } } internal abstract Script<T> Create<T>(string code, ScriptOptions options, Type globalsType); #region Session public Session CreateSession() // TODO (tomat): bool isCancellable = false { return new Session(this, _options, null); } public Session CreateSession(object hostObject) // TODO (tomat): bool isCancellable = false { if (hostObject == null) { throw new ArgumentNullException(nameof(hostObject)); } return new Session(this, _options, hostObject, hostObject.GetType()); } public Session CreateSession(object hostObject, Type hostObjectType) // TODO (tomat): bool isCancellable = false { if (hostObject == null) { throw new ArgumentNullException(nameof(hostObject)); } if (hostObjectType == null) { throw new ArgumentNullException(nameof(hostObjectType)); } Type actualType = hostObject.GetType(); if (!hostObjectType.IsAssignableFrom(actualType)) { throw new ArgumentException(String.Format(ScriptingResources.CantAssignTo, actualType, hostObjectType), "hostObjectType"); } return new Session(this, _options, hostObject, hostObjectType); } public Session CreateSession<THostObject>(THostObject hostObject) // TODO (tomat): bool isCancellable = false where THostObject : class { if (hostObject == null) { throw new ArgumentNullException(nameof(hostObject)); } return new Session(this, _options, hostObject, typeof(THostObject)); } #endregion #region State /// <summary> /// The base directory used to resolve relative paths to assembly references and /// relative paths that appear in source code compiled by this script engine. /// </summary> /// <remarks> /// If null relative paths won't be resolved and an error will be reported when the compiler encounters such paths. 
/// The value can be changed at any point in time. However the new value doesn't affect already compiled submissions. /// The initial value is the current working directory if the current process, or null if not available. /// Changing the base directory doesn't affect the process current working directory used by <see cref="System.IO"/> APIs. /// </remarks> public string BaseDirectory { get { return _options.BaseDirectory; } set { _options = _options.WithBaseDirectory(value); } } public ImmutableArray<string> ReferenceSearchPaths { get { return _options.SearchPaths; } } public void SetReferenceSearchPaths(params string[] paths) { SetReferenceSearchPaths(ImmutableArray.CreateRange<string>(paths)); } public void SetReferenceSearchPaths(IEnumerable<string> paths) { SetReferenceSearchPaths(ImmutableArray.CreateRange<string>(paths)); } public void SetReferenceSearchPaths(ImmutableArray<string> paths) { MetadataFileReferenceResolver.ValidateSearchPaths(paths, "paths"); _options = _options.WithSearchPaths(paths); } /// <summary> /// Returns a list of assemblies that are currently referenced by the engine. /// </summary> public ImmutableArray<MetadataReference> GetReferences() { return _options.References; } /// <summary> /// Adds a reference to specified assembly. /// </summary> /// <param name="assemblyDisplayNameOrPath">Assembly display name or path.</param> /// <exception cref="ArgumentNullException"><paramref name="assemblyDisplayNameOrPath"/> is null.</exception> /// <exception cref="ArgumentException"><paramref name="assemblyDisplayNameOrPath"/> is empty.</exception> /// <exception cref="FileNotFoundException">Assembly file can't be found.</exception> public void AddReference(string assemblyDisplayNameOrPath) { if (assemblyDisplayNameOrPath == null) { throw new ArgumentNullException(nameof(assemblyDisplayNameOrPath)); } _options = _options.AddReferences(assemblyDisplayNameOrPath); } /// <summary> /// Adds a reference to specified assembly. 
/// </summary> /// <param name="assembly">Runtime assembly. The assembly must be loaded from a file on disk. In-memory assemblies are not supported.</param> /// <exception cref="ArgumentNullException"><paramref name="assembly"/> is null.</exception> public void AddReference(Assembly assembly) { if (assembly == null) { throw new ArgumentNullException(nameof(assembly)); } _options = _options.AddReferences(assembly); } /// <summary> /// Adds a reference to specified assembly. /// </summary> /// <param name="reference">Assembly reference.</param> /// <exception cref="ArgumentException"><paramref name="reference"/> is not an assembly reference (it's a module).</exception> /// <exception cref="ArgumentNullException"><paramref name="reference"/> is null.</exception> public void AddReference(MetadataReference reference) { if (reference == null) { throw new ArgumentNullException(nameof(reference)); } if (reference.Properties.Kind != MetadataImageKind.Assembly) { throw new ArgumentException(ScriptingResources.ExpectedAnAssemblyReference, nameof(reference)); } _options = _options.AddReferences(reference); } /// <summary> /// Returns a list of imported namespaces. /// </summary> public ImmutableArray<string> GetImportedNamespaces() { return _options.Namespaces; } /// <summary> /// Imports a namespace, an equivalent of executing "using <paramref name="namespace"/>;" (C#) or "Imports <paramref name="namespace"/>" (VB). /// </summary> /// <exception cref="ArgumentNullException"><paramref name="namespace"/> is null.</exception> /// <exception cref="ArgumentException"><paramref name="namespace"/> is not a valid namespace name.</exception> public void ImportNamespace(string @namespace) { ValidateNamespace(@namespace); // we don't report duplicates to get the same behavior as evaluating "using NS;" twice. 
_options = _options.AddNamespaces(@namespace); } internal static void ValidateNamespace(string @namespace) { if (@namespace == null) { throw new ArgumentNullException(nameof(@namespace)); } // Only check that the namespace is a CLR namespace name. // If the namespace doesn't exist an error will be reported when compiling the next submission. if ([email protected]()) { throw new ArgumentException("Invalid namespace name", nameof(@namespace)); } } #endregion } }
apache-2.0
q4-public/alexa-workshop-2017
src/node_modules/aws-sdk/lib/dynamodb/converter.js
9801
var AWS = require('../core'); var util = AWS.util; var typeOf = require('./types').typeOf; var DynamoDBSet = require('./set'); var NumberValue = require('./numberValue'); AWS.DynamoDB.Converter = { /** * Convert a JavaScript value to its equivalent DynamoDB AttributeValue type * * @param data [any] The data to convert to a DynamoDB AttributeValue * @param options [map] * @option options convertEmptyValues [Boolean] Whether to automatically * convert empty strings, blobs, * and sets to `null` * @option options wrapNumbers [Boolean] Whether to return numbers as a * NumberValue object instead of * converting them to native JavaScript * numbers. This allows for the safe * round-trip transport of numbers of * arbitrary size. * @return [map] An object in the Amazon DynamoDB AttributeValue format * * @see AWS.DynamoDB.Converter.marshall AWS.DynamoDB.Converter.marshall to * convert entire records (rather than individual attributes) */ input: function convertInput(data, options) { options = options || {}; var type = typeOf(data); if (type === 'Object') { return formatMap(data, options); } else if (type === 'Array') { return formatList(data, options); } else if (type === 'Set') { return formatSet(data, options); } else if (type === 'String') { if (data.length === 0 && options.convertEmptyValues) { return convertInput(null); } return { S: data }; } else if (type === 'Number' || type === 'NumberValue') { return { N: data.toString() }; } else if (type === 'Binary') { if (data.length === 0 && options.convertEmptyValues) { return convertInput(null); } return { B: data }; } else if (type === 'Boolean') { return { BOOL: data }; } else if (type === 'null') { return { NULL: true }; } else if (type !== 'undefined' && type !== 'Function') { // this value has a custom constructor return formatMap(data, options); } }, /** * Convert a JavaScript object into a DynamoDB record. 
* * @param data [any] The data to convert to a DynamoDB record * @param options [map] * @option options convertEmptyValues [Boolean] Whether to automatically * convert empty strings, blobs, * and sets to `null` * @option options wrapNumbers [Boolean] Whether to return numbers as a * NumberValue object instead of * converting them to native JavaScript * numbers. This allows for the safe * round-trip transport of numbers of * arbitrary size. * * @return [map] An object in the DynamoDB record format. * * @example Convert a JavaScript object into a DynamoDB record * var marshalled = AWS.DynamoDB.Converter.marshall({ * string: 'foo', * list: ['fizz', 'buzz', 'pop'], * map: { * nestedMap: { * key: 'value', * } * }, * number: 123, * nullValue: null, * boolValue: true, * stringSet: new DynamoDBSet(['foo', 'bar', 'baz']) * }); */ marshall: function marshallItem(data, options) { return AWS.DynamoDB.Converter.input(data, options).M; }, /** * Convert a DynamoDB AttributeValue object to its equivalent JavaScript type. * * @param data [map] An object in the Amazon DynamoDB AttributeValue format * @param options [map] * @option options convertEmptyValues [Boolean] Whether to automatically * convert empty strings, blobs, * and sets to `null` * @option options wrapNumbers [Boolean] Whether to return numbers as a * NumberValue object instead of * converting them to native JavaScript * numbers. This allows for the safe * round-trip transport of numbers of * arbitrary size. 
* * @return [Object|Array|String|Number|Boolean|null] * * @see AWS.DynamoDB.Converter.unmarshall AWS.DynamoDB.Converter.unmarshall to * convert entire records (rather than individual attributes) */ output: function convertOutput(data, options) { options = options || {}; var list, map, i; for (var type in data) { var values = data[type]; if (type === 'M') { map = {}; for (var key in values) { map[key] = convertOutput(values[key], options); } return map; } else if (type === 'L') { list = []; for (i = 0; i < values.length; i++) { list.push(convertOutput(values[i], options)); } return list; } else if (type === 'SS') { list = []; for (i = 0; i < values.length; i++) { list.push(values[i] + ''); } return new DynamoDBSet(list); } else if (type === 'NS') { list = []; for (i = 0; i < values.length; i++) { list.push(convertNumber(values[i], options.wrapNumbers)); } return new DynamoDBSet(list); } else if (type === 'BS') { list = []; for (i = 0; i < values.length; i++) { list.push(new util.Buffer(values[i])); } return new DynamoDBSet(list); } else if (type === 'S') { return values + ''; } else if (type === 'N') { return convertNumber(values, options.wrapNumbers); } else if (type === 'B') { return new util.Buffer(values); } else if (type === 'BOOL') { return (values === 'true' || values === 'TRUE' || values === true); } else if (type === 'NULL') { return null; } } }, /** * Convert a DynamoDB record into a JavaScript object. * * @param data [any] The DynamoDB record * @param options [map] * @option options convertEmptyValues [Boolean] Whether to automatically * convert empty strings, blobs, * and sets to `null` * @option options wrapNumbers [Boolean] Whether to return numbers as a * NumberValue object instead of * converting them to native JavaScript * numbers. This allows for the safe * round-trip transport of numbers of * arbitrary size. 
* * @return [map] An object whose properties have been converted from * DynamoDB's AttributeValue format into their corresponding native * JavaScript types. * * @example Convert a record received from a DynamoDB stream * var unmarshalled = AWS.DynamoDB.Converter.unmarshall({ * string: {S: 'foo'}, * list: {L: [{S: 'fizz'}, {S: 'buzz'}, {S: 'pop'}]}, * map: { * M: { * nestedMap: { * M: { * key: {S: 'value'} * } * } * } * }, * number: {N: '123'}, * nullValue: {NULL: true}, * boolValue: {BOOL: true} * }); */ unmarshall: function unmarshall(data, options) { return AWS.DynamoDB.Converter.output({M: data}, options); } }; /** * @api private * @param data [Array] * @param options [map] */ function formatList(data, options) { var list = {L: []}; for (var i = 0; i < data.length; i++) { list['L'].push(AWS.DynamoDB.Converter.input(data[i], options)); } return list; } /** * @api private * @param value [String] * @param wrapNumbers [Boolean] */ function convertNumber(value, wrapNumbers) { return wrapNumbers ? 
new NumberValue(value) : Number(value); } /** * @api private * @param data [map] * @param options [map] */ function formatMap(data, options) { var map = {M: {}}; for (var key in data) { var formatted = AWS.DynamoDB.Converter.input(data[key], options); if (formatted !== void 0) { map['M'][key] = formatted; } } return map; } /** * @api private */ function formatSet(data, options) { options = options || {}; var values = data.values; if (options.convertEmptyValues) { values = filterEmptySetValues(data); if (values.length === 0) { return AWS.DynamoDB.Converter.input(null); } } var map = {}; switch (data.type) { case 'String': map['SS'] = values; break; case 'Binary': map['BS'] = values; break; case 'Number': map['NS'] = values.map(function (value) { return value.toString(); }); } return map; } /** * @api private */ function filterEmptySetValues(set) { var nonEmptyValues = []; var potentiallyEmptyTypes = { String: true, Binary: true, Number: false }; if (potentiallyEmptyTypes[set.type]) { for (var i = 0; i < set.values.length; i++) { if (set.values[i].length === 0) { continue; } nonEmptyValues.push(set.values[i]); } return nonEmptyValues; } return set.values; } module.exports = AWS.DynamoDB.Converter;
apache-2.0
danielknorr/CTK
Libs/Core/Testing/Cpp/ctkHighPrecisionTimerTest.cpp
1432
/*========================================================================= Library: CTK Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.txt Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =========================================================================*/ #include <ctkHighPrecisionTimer.h> #include <QDebug> #include <QTest> //----------------------------------------------------------------------------- int ctkHighPrecisionTimerTest(int /*argc*/, char* /*argv*/[]) { ctkHighPrecisionTimer timer; timer.start(); QTest::qSleep(250); qint64 millis = timer.elapsedMilli(); qint64 micros = timer.elapsedMicro(); if (millis < 200 || millis > 300 || micros < 200*1000 || micros > 300*1000) { qDebug() << "Measured time (" << millis << "ms | " << micros << "us) is not between 200 and 300ms."; return EXIT_FAILURE; } return EXIT_SUCCESS; }
apache-2.0
yuxuac/docs.particular.net
samples/versioning/Version_3/V1.Messages/ISomethingHappened.cs
172
using NServiceBus; #region V1Message namespace V1.Messages { public interface ISomethingHappened : IEvent { int SomeData { get; set; } } } #endregion
apache-2.0
vinodkc/spark
external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaSourceProvider.scala
33130
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.kafka010 import java.{util => ju} import java.util.{Locale, UUID} import scala.collection.JavaConverters._ import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.clients.producer.ProducerConfig import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer} import org.apache.spark.internal.Logging import org.apache.spark.kafka010.KafkaConfigUpdater import org.apache.spark.sql.{AnalysisException, DataFrame, SaveMode, SQLContext} import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap import org.apache.spark.sql.connector.catalog.{SupportsRead, SupportsWrite, Table, TableCapability} import org.apache.spark.sql.connector.metric.{CustomMetric, CustomSumMetric} import org.apache.spark.sql.connector.read.{Batch, Scan, ScanBuilder} import org.apache.spark.sql.connector.read.streaming.{ContinuousStream, MicroBatchStream} import org.apache.spark.sql.connector.write.{LogicalWriteInfo, SupportsTruncate, Write, WriteBuilder} import org.apache.spark.sql.execution.streaming.{Sink, Source} import org.apache.spark.sql.internal.connector.{SimpleTableProvider, SupportsStreamingUpdateAsAppend} import org.apache.spark.sql.sources._ 
import org.apache.spark.sql.streaming.OutputMode import org.apache.spark.sql.types.StructType import org.apache.spark.sql.util.CaseInsensitiveStringMap /** * The provider class for all Kafka readers and writers. It is designed such that it throws * IllegalArgumentException when the Kafka Dataset is created, so that it can catch * missing options even before the query is started. */ private[kafka010] class KafkaSourceProvider extends DataSourceRegister with StreamSourceProvider with StreamSinkProvider with RelationProvider with CreatableRelationProvider with SimpleTableProvider with Logging { import KafkaSourceProvider._ override def shortName(): String = "kafka" /** * Returns the name and schema of the source. In addition, it also verifies whether the options * are correct and sufficient to create the [[KafkaSource]] when the query is started. */ override def sourceSchema( sqlContext: SQLContext, schema: Option[StructType], providerName: String, parameters: Map[String, String]): (String, StructType) = { val caseInsensitiveParameters = CaseInsensitiveMap(parameters) validateStreamOptions(caseInsensitiveParameters) require(schema.isEmpty, "Kafka source has a fixed schema and cannot be set with a custom one") val includeHeaders = caseInsensitiveParameters.getOrElse(INCLUDE_HEADERS, "false").toBoolean (shortName(), KafkaRecordToRowConverter.kafkaSchema(includeHeaders)) } override def createSource( sqlContext: SQLContext, metadataPath: String, schema: Option[StructType], providerName: String, parameters: Map[String, String]): Source = { val caseInsensitiveParameters = CaseInsensitiveMap(parameters) validateStreamOptions(caseInsensitiveParameters) // Each running query should use its own group id. Otherwise, the query may be only assigned // partial data since Kafka will assign partitions to multiple consumers having the same group // id. Hence, we should generate a unique id for each query. 
val uniqueGroupId = streamingUniqueGroupId(caseInsensitiveParameters, metadataPath) val specifiedKafkaParams = convertToSpecifiedParams(caseInsensitiveParameters) val startingStreamOffsets = KafkaSourceProvider.getKafkaOffsetRangeLimit( caseInsensitiveParameters, STARTING_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_BY_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_OPTION_KEY, LatestOffsetRangeLimit) val kafkaOffsetReader = KafkaOffsetReader.build( strategy(caseInsensitiveParameters), kafkaParamsForDriver(specifiedKafkaParams), caseInsensitiveParameters, driverGroupIdPrefix = s"$uniqueGroupId-driver") new KafkaSource( sqlContext, kafkaOffsetReader, kafkaParamsForExecutors(specifiedKafkaParams, uniqueGroupId), caseInsensitiveParameters, metadataPath, startingStreamOffsets, failOnDataLoss(caseInsensitiveParameters)) } override def getTable(options: CaseInsensitiveStringMap): KafkaTable = { val includeHeaders = options.getBoolean(INCLUDE_HEADERS, false) new KafkaTable(includeHeaders) } /** * Returns a new base relation with the given parameters. * * @note The parameters' keywords are case insensitive and this insensitivity is enforced * by the Map that is passed to the function. 
*/ override def createRelation( sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation = { val caseInsensitiveParameters = CaseInsensitiveMap(parameters) validateBatchOptions(caseInsensitiveParameters) val specifiedKafkaParams = convertToSpecifiedParams(caseInsensitiveParameters) val startingRelationOffsets = KafkaSourceProvider.getKafkaOffsetRangeLimit( caseInsensitiveParameters, STARTING_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_BY_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_OPTION_KEY, EarliestOffsetRangeLimit) assert(startingRelationOffsets != LatestOffsetRangeLimit) val endingRelationOffsets = KafkaSourceProvider.getKafkaOffsetRangeLimit( caseInsensitiveParameters, ENDING_TIMESTAMP_OPTION_KEY, ENDING_OFFSETS_BY_TIMESTAMP_OPTION_KEY, ENDING_OFFSETS_OPTION_KEY, LatestOffsetRangeLimit) assert(endingRelationOffsets != EarliestOffsetRangeLimit) val includeHeaders = caseInsensitiveParameters.getOrElse(INCLUDE_HEADERS, "false").toBoolean new KafkaRelation( sqlContext, strategy(caseInsensitiveParameters), sourceOptions = caseInsensitiveParameters, specifiedKafkaParams = specifiedKafkaParams, failOnDataLoss = failOnDataLoss(caseInsensitiveParameters), includeHeaders = includeHeaders, startingOffsets = startingRelationOffsets, endingOffsets = endingRelationOffsets) } override def createSink( sqlContext: SQLContext, parameters: Map[String, String], partitionColumns: Seq[String], outputMode: OutputMode): Sink = { val caseInsensitiveParameters = CaseInsensitiveMap(parameters) val defaultTopic = caseInsensitiveParameters.get(TOPIC_OPTION_KEY).map(_.trim) val specifiedKafkaParams = kafkaParamsForProducer(caseInsensitiveParameters) new KafkaSink(sqlContext, specifiedKafkaParams, defaultTopic) } override def createRelation( outerSQLContext: SQLContext, mode: SaveMode, parameters: Map[String, String], data: DataFrame): BaseRelation = { mode match { case SaveMode.Overwrite | SaveMode.Ignore => throw new AnalysisException(s"Save mode $mode not allowed for Kafka. 
" + s"Allowed save modes are ${SaveMode.Append} and " + s"${SaveMode.ErrorIfExists} (default).") case _ => // good } val caseInsensitiveParameters = CaseInsensitiveMap(parameters) val topic = caseInsensitiveParameters.get(TOPIC_OPTION_KEY).map(_.trim) val specifiedKafkaParams = kafkaParamsForProducer(caseInsensitiveParameters) KafkaWriter.write(outerSQLContext.sparkSession, data.queryExecution, specifiedKafkaParams, topic) /* This method is suppose to return a relation that reads the data that was written. * We cannot support this for Kafka. Therefore, in order to make things consistent, * we return an empty base relation. */ new BaseRelation { override def sqlContext: SQLContext = unsupportedException override def schema: StructType = unsupportedException override def needConversion: Boolean = unsupportedException override def sizeInBytes: Long = unsupportedException override def unhandledFilters(filters: Array[Filter]): Array[Filter] = unsupportedException private def unsupportedException = throw new UnsupportedOperationException("BaseRelation from Kafka write " + "operation is not usable.") } } private def strategy(params: CaseInsensitiveMap[String]) = { val lowercaseParams = params.map { case (k, v) => (k.toLowerCase(Locale.ROOT), v) } lowercaseParams.find(x => STRATEGY_OPTION_KEYS.contains(x._1)).get match { case (ASSIGN, value) => AssignStrategy(JsonUtils.partitions(value)) case (SUBSCRIBE, value) => SubscribeStrategy(value.split(",").map(_.trim()).filter(_.nonEmpty)) case (SUBSCRIBE_PATTERN, value) => SubscribePatternStrategy(value.trim()) case _ => // Should never reach here as we are already matching on // matched strategy names throw new IllegalArgumentException("Unknown option") } } private def failOnDataLoss(params: CaseInsensitiveMap[String]) = params.getOrElse(FAIL_ON_DATA_LOSS_OPTION_KEY, "true").toBoolean private def validateGeneralOptions(params: CaseInsensitiveMap[String]): Unit = { // Validate source options val lowercaseParams = params.map { 
case (k, v) => (k.toLowerCase(Locale.ROOT), v) } val specifiedStrategies = lowercaseParams.filter { case (k, _) => STRATEGY_OPTION_KEYS.contains(k) }.toSeq if (specifiedStrategies.isEmpty) { throw new IllegalArgumentException( "One of the following options must be specified for Kafka source: " + STRATEGY_OPTION_KEYS.mkString(", ") + ". See the docs for more details.") } else if (specifiedStrategies.size > 1) { throw new IllegalArgumentException( "Only one of the following options can be specified for Kafka source: " + STRATEGY_OPTION_KEYS.mkString(", ") + ". See the docs for more details.") } lowercaseParams.find(x => STRATEGY_OPTION_KEYS.contains(x._1)).get match { case (ASSIGN, value) => if (!value.trim.startsWith("{")) { throw new IllegalArgumentException( "No topicpartitions to assign as specified value for option " + s"'assign' is '$value'") } case (SUBSCRIBE, value) => val topics = value.split(",").map(_.trim).filter(_.nonEmpty) if (topics.isEmpty) { throw new IllegalArgumentException( "No topics to subscribe to as specified value for option " + s"'subscribe' is '$value'") } case (SUBSCRIBE_PATTERN, value) => val pattern = params(SUBSCRIBE_PATTERN).trim() if (pattern.isEmpty) { throw new IllegalArgumentException( "Pattern to subscribe is empty as specified value for option " + s"'subscribePattern' is '$value'") } case _ => // Should never reach here as we are already matching on // matched strategy names throw new IllegalArgumentException("Unknown option") } // Validate minPartitions value if present if (params.contains(MIN_PARTITIONS_OPTION_KEY)) { val p = params(MIN_PARTITIONS_OPTION_KEY).toInt if (p <= 0) throw new IllegalArgumentException("minPartitions must be positive") } // Validate user-specified Kafka options if (params.contains(s"kafka.${ConsumerConfig.GROUP_ID_CONFIG}")) { logWarning(CUSTOM_GROUP_ID_ERROR_MESSAGE) if (params.contains(GROUP_ID_PREFIX)) { logWarning("Option 'groupIdPrefix' will be ignored as " + s"option 
'kafka.${ConsumerConfig.GROUP_ID_CONFIG}' has been set.") } } if (params.contains(s"kafka.${ConsumerConfig.AUTO_OFFSET_RESET_CONFIG}")) { throw new IllegalArgumentException( s""" |Kafka option '${ConsumerConfig.AUTO_OFFSET_RESET_CONFIG}' is not supported. |Instead set the source option '$STARTING_OFFSETS_OPTION_KEY' to 'earliest' or 'latest' |to specify where to start. Structured Streaming manages which offsets are consumed |internally, rather than relying on the kafkaConsumer to do it. This will ensure that no |data is missed when new topics/partitions are dynamically subscribed. Note that |'$STARTING_OFFSETS_OPTION_KEY' only applies when a new Streaming query is started, and |that resuming will always pick up from where the query left off. See the docs for more |details. """.stripMargin) } if (params.contains(s"kafka.${ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG}")) { throw new IllegalArgumentException( s"Kafka option '${ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG}' is not supported as keys " + "are deserialized as byte arrays with ByteArrayDeserializer. Use DataFrame operations " + "to explicitly deserialize the keys.") } if (params.contains(s"kafka.${ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG}")) { throw new IllegalArgumentException( s"Kafka option '${ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG}' is not supported as " + "values are deserialized as byte arrays with ByteArrayDeserializer. 
Use DataFrame " + "operations to explicitly deserialize the values.") } val otherUnsupportedConfigs = Seq( ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, // committing correctly requires new APIs in Source ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG) // interceptors can modify payload, so not safe otherUnsupportedConfigs.foreach { c => if (params.contains(s"kafka.$c")) { throw new IllegalArgumentException(s"Kafka option '$c' is not supported") } } if (!params.contains(s"kafka.${ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG}")) { throw new IllegalArgumentException( s"Option 'kafka.${ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG}' must be specified for " + s"configuring Kafka consumer") } if (params.contains(MIN_OFFSET_PER_TRIGGER) && params.contains(MAX_OFFSET_PER_TRIGGER)) { val minOffsets = params.get(MIN_OFFSET_PER_TRIGGER).get.toLong val maxOffsets = params.get(MAX_OFFSET_PER_TRIGGER).get.toLong if (minOffsets > maxOffsets) { throw new IllegalArgumentException(s"The value of minOffsetPerTrigger($minOffsets) is " + s"higher than the maxOffsetsPerTrigger($maxOffsets).") } } } private def validateStreamOptions(params: CaseInsensitiveMap[String]) = { // Stream specific options params.get(ENDING_OFFSETS_OPTION_KEY).map(_ => throw new IllegalArgumentException("ending offset not valid in streaming queries")) params.get(ENDING_OFFSETS_BY_TIMESTAMP_OPTION_KEY).map(_ => throw new IllegalArgumentException("ending timestamp not valid in streaming queries")) validateGeneralOptions(params) } private def validateBatchOptions(params: CaseInsensitiveMap[String]) = { // Batch specific options KafkaSourceProvider.getKafkaOffsetRangeLimit( params, STARTING_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_BY_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_OPTION_KEY, EarliestOffsetRangeLimit) match { case EarliestOffsetRangeLimit => // good to go case LatestOffsetRangeLimit => throw new IllegalArgumentException("starting offset can't be latest " + "for batch queries on Kafka") case 
SpecificOffsetRangeLimit(partitionOffsets) => partitionOffsets.foreach { case (tp, off) if off == KafkaOffsetRangeLimit.LATEST => throw new IllegalArgumentException(s"startingOffsets for $tp can't " + "be latest for batch queries on Kafka") case _ => // ignore } case _: SpecificTimestampRangeLimit => // good to go case _: GlobalTimestampRangeLimit => // good to go } KafkaSourceProvider.getKafkaOffsetRangeLimit( params, ENDING_TIMESTAMP_OPTION_KEY, ENDING_OFFSETS_BY_TIMESTAMP_OPTION_KEY, ENDING_OFFSETS_OPTION_KEY, LatestOffsetRangeLimit) match { case EarliestOffsetRangeLimit => throw new IllegalArgumentException("ending offset can't be earliest " + "for batch queries on Kafka") case LatestOffsetRangeLimit => // good to go case SpecificOffsetRangeLimit(partitionOffsets) => partitionOffsets.foreach { case (tp, off) if off == KafkaOffsetRangeLimit.EARLIEST => throw new IllegalArgumentException(s"ending offset for $tp can't be " + "earliest for batch queries on Kafka") case _ => // ignore } case _: SpecificTimestampRangeLimit => // good to go case _: GlobalTimestampRangeLimit => // good to go } validateGeneralOptions(params) // Don't want to throw an error, but at least log a warning. 
if (params.contains(MAX_OFFSET_PER_TRIGGER)) { logWarning("maxOffsetsPerTrigger option ignored in batch queries") } if (params.contains(MIN_OFFSET_PER_TRIGGER)) { logWarning("minOffsetsPerTrigger option ignored in batch queries") } if (params.contains(MAX_TRIGGER_DELAY)) { logWarning("maxTriggerDelay option ignored in batch queries") } } class KafkaTable(includeHeaders: Boolean) extends Table with SupportsRead with SupportsWrite { override def name(): String = "KafkaTable" override def schema(): StructType = KafkaRecordToRowConverter.kafkaSchema(includeHeaders) override def capabilities(): ju.Set[TableCapability] = { import TableCapability._ // ACCEPT_ANY_SCHEMA is needed because of the following reasons: // * Kafka writer validates the schema instead of the SQL analyzer (the schema is fixed) // * Read schema differs from write schema (please see Kafka integration guide) ju.EnumSet.of(BATCH_READ, BATCH_WRITE, MICRO_BATCH_READ, CONTINUOUS_READ, STREAMING_WRITE, ACCEPT_ANY_SCHEMA) } override def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder = () => new KafkaScan(options) override def newWriteBuilder(info: LogicalWriteInfo): WriteBuilder = { new WriteBuilder with SupportsTruncate with SupportsStreamingUpdateAsAppend { private val options = info.options private val inputSchema: StructType = info.schema() private val topic = Option(options.get(TOPIC_OPTION_KEY)).map(_.trim) private val producerParams = kafkaParamsForProducer(CaseInsensitiveMap(options.asScala.toMap)) override def build(): Write = KafkaWrite(topic, producerParams, inputSchema) override def truncate(): WriteBuilder = this } } } class KafkaScan(options: CaseInsensitiveStringMap) extends Scan { val includeHeaders = options.getBoolean(INCLUDE_HEADERS, false) override def readSchema(): StructType = { KafkaRecordToRowConverter.kafkaSchema(includeHeaders) } override def toBatch(): Batch = { val caseInsensitiveOptions = CaseInsensitiveMap(options.asScala.toMap) 
validateBatchOptions(caseInsensitiveOptions) val specifiedKafkaParams = convertToSpecifiedParams(caseInsensitiveOptions) val startingRelationOffsets = KafkaSourceProvider.getKafkaOffsetRangeLimit( caseInsensitiveOptions, STARTING_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_BY_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_OPTION_KEY, EarliestOffsetRangeLimit) val endingRelationOffsets = KafkaSourceProvider.getKafkaOffsetRangeLimit( caseInsensitiveOptions, ENDING_TIMESTAMP_OPTION_KEY, ENDING_OFFSETS_BY_TIMESTAMP_OPTION_KEY, ENDING_OFFSETS_OPTION_KEY, LatestOffsetRangeLimit) new KafkaBatch( strategy(caseInsensitiveOptions), caseInsensitiveOptions, specifiedKafkaParams, failOnDataLoss(caseInsensitiveOptions), startingRelationOffsets, endingRelationOffsets, includeHeaders) } override def toMicroBatchStream(checkpointLocation: String): MicroBatchStream = { val caseInsensitiveOptions = CaseInsensitiveMap(options.asScala.toMap) validateStreamOptions(caseInsensitiveOptions) // Each running query should use its own group id. Otherwise, the query may be only assigned // partial data since Kafka will assign partitions to multiple consumers having the same group // id. Hence, we should generate a unique id for each query. 
val uniqueGroupId = streamingUniqueGroupId(caseInsensitiveOptions, checkpointLocation) val specifiedKafkaParams = convertToSpecifiedParams(caseInsensitiveOptions) val startingStreamOffsets = KafkaSourceProvider.getKafkaOffsetRangeLimit( caseInsensitiveOptions, STARTING_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_BY_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_OPTION_KEY, LatestOffsetRangeLimit) val kafkaOffsetReader = KafkaOffsetReader.build( strategy(caseInsensitiveOptions), kafkaParamsForDriver(specifiedKafkaParams), caseInsensitiveOptions, driverGroupIdPrefix = s"$uniqueGroupId-driver") new KafkaMicroBatchStream( kafkaOffsetReader, kafkaParamsForExecutors(specifiedKafkaParams, uniqueGroupId), options, checkpointLocation, startingStreamOffsets, failOnDataLoss(caseInsensitiveOptions)) } override def toContinuousStream(checkpointLocation: String): ContinuousStream = { val caseInsensitiveOptions = CaseInsensitiveMap(options.asScala.toMap) validateStreamOptions(caseInsensitiveOptions) // Each running query should use its own group id. Otherwise, the query may be only assigned // partial data since Kafka will assign partitions to multiple consumers having the same group // id. Hence, we should generate a unique id for each query. 
val uniqueGroupId = streamingUniqueGroupId(caseInsensitiveOptions, checkpointLocation) val specifiedKafkaParams = convertToSpecifiedParams(caseInsensitiveOptions) val startingStreamOffsets = KafkaSourceProvider.getKafkaOffsetRangeLimit( caseInsensitiveOptions, STARTING_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_BY_TIMESTAMP_OPTION_KEY, STARTING_OFFSETS_OPTION_KEY, LatestOffsetRangeLimit) val kafkaOffsetReader = KafkaOffsetReader.build( strategy(caseInsensitiveOptions), kafkaParamsForDriver(specifiedKafkaParams), caseInsensitiveOptions, driverGroupIdPrefix = s"$uniqueGroupId-driver") new KafkaContinuousStream( kafkaOffsetReader, kafkaParamsForExecutors(specifiedKafkaParams, uniqueGroupId), options, checkpointLocation, startingStreamOffsets, failOnDataLoss(caseInsensitiveOptions)) } override def supportedCustomMetrics(): Array[CustomMetric] = { Array(new OffsetOutOfRangeMetric, new DataLossMetric) } } } private[spark] class OffsetOutOfRangeMetric extends CustomSumMetric { override def name(): String = "offsetOutOfRange" override def description(): String = "estimated number of fetched offsets out of range" } private[spark] class DataLossMetric extends CustomSumMetric { override def name(): String = "dataLoss" override def description(): String = "number of data loss error" } private[kafka010] object KafkaSourceProvider extends Logging { private val ASSIGN = "assign" private val SUBSCRIBE_PATTERN = "subscribepattern" private val SUBSCRIBE = "subscribe" private val STRATEGY_OPTION_KEYS = Set(SUBSCRIBE, SUBSCRIBE_PATTERN, ASSIGN) private[kafka010] val STARTING_OFFSETS_OPTION_KEY = "startingoffsets" private[kafka010] val ENDING_OFFSETS_OPTION_KEY = "endingoffsets" private[kafka010] val STARTING_OFFSETS_BY_TIMESTAMP_OPTION_KEY = "startingoffsetsbytimestamp" private[kafka010] val ENDING_OFFSETS_BY_TIMESTAMP_OPTION_KEY = "endingoffsetsbytimestamp" private[kafka010] val STARTING_TIMESTAMP_OPTION_KEY = "startingtimestamp" private[kafka010] val ENDING_TIMESTAMP_OPTION_KEY = 
"endingtimestamp" private val FAIL_ON_DATA_LOSS_OPTION_KEY = "failondataloss" private[kafka010] val MIN_PARTITIONS_OPTION_KEY = "minpartitions" private[kafka010] val MAX_OFFSET_PER_TRIGGER = "maxoffsetspertrigger" private[kafka010] val MIN_OFFSET_PER_TRIGGER = "minoffsetspertrigger" private[kafka010] val MAX_TRIGGER_DELAY = "maxtriggerdelay" private[kafka010] val DEFAULT_MAX_TRIGGER_DELAY = "15m" private[kafka010] val FETCH_OFFSET_NUM_RETRY = "fetchoffset.numretries" private[kafka010] val FETCH_OFFSET_RETRY_INTERVAL_MS = "fetchoffset.retryintervalms" private[kafka010] val CONSUMER_POLL_TIMEOUT = "kafkaconsumer.polltimeoutms" private[kafka010] val STARTING_OFFSETS_BY_TIMESTAMP_STRATEGY_KEY = "startingoffsetsbytimestampstrategy" private val GROUP_ID_PREFIX = "groupidprefix" private[kafka010] val INCLUDE_HEADERS = "includeheaders" private[kafka010] object StrategyOnNoMatchStartingOffset extends Enumeration { val ERROR, LATEST = Value } val TOPIC_OPTION_KEY = "topic" val INSTRUCTION_FOR_FAIL_ON_DATA_LOSS_FALSE = """ |Some data may have been lost because they are not available in Kafka any more; either the | data was aged out by Kafka or the topic may have been deleted before all the data in the | topic was processed. If you want your streaming query to fail on such cases, set the source | option "failOnDataLoss" to "true". """.stripMargin val INSTRUCTION_FOR_FAIL_ON_DATA_LOSS_TRUE = """ |Some data may have been lost because they are not available in Kafka any more; either the | data was aged out by Kafka or the topic may have been deleted before all the data in the | topic was processed. If you don't want your streaming query to fail on such cases, set the | source option "failOnDataLoss" to "false". """.stripMargin val CUSTOM_GROUP_ID_ERROR_MESSAGE = s"""Kafka option 'kafka.${ConsumerConfig.GROUP_ID_CONFIG}' has been set on this query, it is | not recommended to set this option. 
This option is unsafe to use since multiple concurrent | queries or sources using the same group id will interfere with each other as they are part | of the same consumer group. Restarted queries may also suffer interference from the | previous run having the same group id. The user should have only one query per group id, | and/or set the option 'kafka.session.timeout.ms' to be very small so that the Kafka | consumers from the previous query are marked dead by the Kafka group coordinator before the | restarted query starts running. """.stripMargin private val serClassName = classOf[ByteArraySerializer].getName private val deserClassName = classOf[ByteArrayDeserializer].getName def getKafkaOffsetRangeLimit( params: CaseInsensitiveMap[String], globalOffsetTimestampOptionKey: String, offsetByTimestampOptionKey: String, offsetOptionKey: String, defaultOffsets: KafkaOffsetRangeLimit): KafkaOffsetRangeLimit = { // The order below represents "preferences" val strategyOnNoMatchStartingOffset = params.get(STARTING_OFFSETS_BY_TIMESTAMP_STRATEGY_KEY) .map(v => StrategyOnNoMatchStartingOffset.withName(v.toUpperCase(Locale.ROOT))) .getOrElse(StrategyOnNoMatchStartingOffset.ERROR) if (params.contains(globalOffsetTimestampOptionKey)) { // 1. global timestamp val tsStr = params(globalOffsetTimestampOptionKey).trim try { val ts = tsStr.toLong GlobalTimestampRangeLimit(ts, strategyOnNoMatchStartingOffset) } catch { case _: NumberFormatException => throw new IllegalArgumentException(s"Expected a single long value, got $tsStr") } } else if (params.contains(offsetByTimestampOptionKey)) { // 2. timestamp per topic partition val json = params(offsetByTimestampOptionKey).trim SpecificTimestampRangeLimit(JsonUtils.partitionTimestamps(json), strategyOnNoMatchStartingOffset) } else { // 3. 
latest/earliest/offset params.get(offsetOptionKey).map(_.trim) match { case Some(offset) if offset.toLowerCase(Locale.ROOT) == "latest" => LatestOffsetRangeLimit case Some(offset) if offset.toLowerCase(Locale.ROOT) == "earliest" => EarliestOffsetRangeLimit case Some(json) => SpecificOffsetRangeLimit(JsonUtils.partitionOffsets(json)) case None => defaultOffsets } } } def kafkaParamsForDriver(specifiedKafkaParams: Map[String, String]): ju.Map[String, Object] = KafkaConfigUpdater("source", specifiedKafkaParams) .set(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deserClassName) .set(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deserClassName) // Set to "earliest" to avoid exceptions. However, KafkaSource will fetch the initial // offsets by itself instead of counting on KafkaConsumer. .set(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") // So that consumers in the driver does not commit offsets unnecessarily .set(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false") // So that the driver does not pull too much data .set(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, java.lang.Integer.valueOf(1)) // If buffer config is not set, set it to reasonable value to work around // buffer issues (see KAFKA-3135) .setIfUnset(ConsumerConfig.RECEIVE_BUFFER_CONFIG, 65536: java.lang.Integer) .build() def kafkaParamsForExecutors( specifiedKafkaParams: Map[String, String], uniqueGroupId: String): ju.Map[String, Object] = KafkaConfigUpdater("executor", specifiedKafkaParams) .set(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deserClassName) .set(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deserClassName) // Make sure executors do only what the driver tells them. 
.set(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "none") // So that consumers in executors do not mess with any existing group id .setIfUnset(ConsumerConfig.GROUP_ID_CONFIG, s"$uniqueGroupId-executor") // So that consumers in executors does not commit offsets unnecessarily .set(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false") // If buffer config is not set, set it to reasonable value to work around // buffer issues (see KAFKA-3135) .setIfUnset(ConsumerConfig.RECEIVE_BUFFER_CONFIG, 65536: java.lang.Integer) .build() /** * Returns a unique batch consumer group (group.id), allowing the user to set the prefix of * the consumer group */ private[kafka010] def batchUniqueGroupId(params: CaseInsensitiveMap[String]): String = { val groupIdPrefix = params.getOrElse(GROUP_ID_PREFIX, "spark-kafka-relation") s"${groupIdPrefix}-${UUID.randomUUID}" } /** * Returns a unique streaming consumer group (group.id), allowing the user to set the prefix of * the consumer group */ private def streamingUniqueGroupId( params: CaseInsensitiveMap[String], metadataPath: String): String = { val groupIdPrefix = params.getOrElse(GROUP_ID_PREFIX, "spark-kafka-source") s"${groupIdPrefix}-${UUID.randomUUID}-${metadataPath.hashCode}" } private[kafka010] def kafkaParamsForProducer( params: CaseInsensitiveMap[String]): ju.Map[String, Object] = { if (params.contains(s"kafka.${ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG}")) { throw new IllegalArgumentException( s"Kafka option '${ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG}' is not supported as keys " + "are serialized with ByteArraySerializer.") } if (params.contains(s"kafka.${ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG}")) { throw new IllegalArgumentException( s"Kafka option '${ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG}' is not supported as " + "value are serialized with ByteArraySerializer.") } val specifiedKafkaParams = convertToSpecifiedParams(params) KafkaConfigUpdater("executor", specifiedKafkaParams) 
.set(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, serClassName) .set(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, serClassName) .build() } private def convertToSpecifiedParams(parameters: Map[String, String]): Map[String, String] = { parameters .keySet .filter(_.toLowerCase(Locale.ROOT).startsWith("kafka.")) .map { k => k.drop(6).toString -> parameters(k) } .toMap } }
apache-2.0
minestarks/TypeScript
tests/baselines/reference/useSharedArrayBuffer3.js
264
//// [useSharedArrayBuffer3.ts] var foge = new SharedArrayBuffer(1024); var bar = foge.slice(1, 10); var len = foge.byteLength; //// [useSharedArrayBuffer3.js] var foge = new SharedArrayBuffer(1024); var bar = foge.slice(1, 10); var len = foge.byteLength;
apache-2.0
Natrezim/perun
perun-web-gui/src/main/java/cz/metacentrum/perun/webgui/json/securityTeamsManager/RemoveUserFromBlacklist.java
3098
package cz.metacentrum.perun.webgui.json.securityTeamsManager; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.json.client.JSONNumber; import com.google.gwt.json.client.JSONObject; import com.google.gwt.user.client.ui.HTML; import cz.metacentrum.perun.webgui.client.PerunWebSession; import cz.metacentrum.perun.webgui.json.JsonCallbackEvents; import cz.metacentrum.perun.webgui.json.JsonPostClient; import cz.metacentrum.perun.webgui.model.PerunError; import cz.metacentrum.perun.webgui.widgets.Confirm; /** * Ajax query which removes user from SecurityTeams blacklist * * @author Pavel Zlamal <[email protected]> */ public class RemoveUserFromBlacklist { // web session private PerunWebSession session = PerunWebSession.getInstance(); // URL to call final String JSON_URL = "securityTeamsManager/removeUserFromBlacklist"; // custom events private JsonCallbackEvents events = new JsonCallbackEvents(); // ids private int securityTeamId = 0; private int userId = 0; /** * Creates a new request * @param id */ public RemoveUserFromBlacklist(int id) { this.securityTeamId = id; } /** * Creates a new request with custom events * * @param id * @param events Custom events */ public RemoveUserFromBlacklist(int id, JsonCallbackEvents events) { this.securityTeamId = id; this.events = events; } /** * Tests the values, if the process can continue * * @return true/false for continue/stop */ private boolean testDeleting() { boolean result = true; String errorMsg = ""; if(securityTeamId == 0){ errorMsg += "Wrong parameter <strong>SECURITY TEAM ID</strong>.\n"; result = false; } if(errorMsg.length()>0){ Confirm c = new Confirm("Error while deleting Security Team", new HTML(errorMsg), true); c.show(); } return result; } /** * Attempts to remove user from blacklist of Security Team, it first tests the values and then submits them. 
* * @param userId ID of User to be removed from blacklist */ public void removeUserFromBlacklist(final int userId) { this.userId = userId; // test arguments if(!this.testDeleting()){ return; } // new events JsonCallbackEvents newEvents = new JsonCallbackEvents(){ public void onError(PerunError error) { session.getUiElements().setLogErrorText("Removing user " + userId + " from blacklist failed."); events.onError(error); }; public void onFinished(JavaScriptObject jso) { session.getUiElements().setLogSuccessText("User " + userId + " removed from blacklist."); events.onFinished(jso); }; public void onLoadingStart() { events.onLoadingStart(); }; }; // sending data JsonPostClient jspc = new JsonPostClient(newEvents); jspc.sendData(JSON_URL, prepareJSONObject()); } /** * Prepares a JSON object * * @return JSONObject the whole query */ private JSONObject prepareJSONObject() { JSONObject jsonQuery = new JSONObject(); jsonQuery.put("securityTeam", new JSONNumber(securityTeamId)); jsonQuery.put("user", new JSONNumber(userId)); return jsonQuery; } }
bsd-2-clause
georgemarshall/django
django/core/management/commands/sqlflush.py
946
from django.core.management.base import BaseCommand from django.core.management.sql import sql_flush from django.db import DEFAULT_DB_ALIAS, connections class Command(BaseCommand): help = ( "Returns a list of the SQL statements required to return all tables in " "the database to the state they were in just after they were installed." ) output_transaction = True def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Nominates a database to print the SQL for. Defaults to the "default" database.', ) def handle(self, **options): sql_statements = sql_flush(self.style, connections[options['database']], only_django=True) if not sql_statements and options['verbosity'] >= 1: self.stderr.write('No tables found.') return '\n'.join(sql_statements)
bsd-3-clause
nyotis/blaze-lib
blazetest/src/mathtest/dmatdmatadd/UDaLDb.cpp
4020
//================================================================================================= /*! // \file src/mathtest/dmatdmatadd/UDaLDb.cpp // \brief Source file for the UDaLDb dense matrix/dense matrix addition math test // // Copyright (C) 2013 Klaus Iglberger - All Rights Reserved // // This file is part of the Blaze library. You can redistribute it and/or modify it under // the terms of the New (Revised) BSD License. Redistribution and use in source and binary // forms, with or without modification, are permitted provided that the following conditions // are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of // conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright notice, this list // of conditions and the following disclaimer in the documentation and/or other materials // provided with the distribution. // 3. Neither the names of the Blaze development group nor the names of its contributors // may be used to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES // OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT // SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR // BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN // ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH // DAMAGE. 
*/ //================================================================================================= //************************************************************************************************* // Includes //************************************************************************************************* #include <cstdlib> #include <iostream> #include <blaze/math/DynamicMatrix.h> #include <blaze/math/LowerMatrix.h> #include <blaze/math/UpperMatrix.h> #include <blazetest/mathtest/Creator.h> #include <blazetest/mathtest/dmatdmatadd/OperationTest.h> #include <blazetest/system/MathTest.h> //================================================================================================= // // MAIN FUNCTION // //================================================================================================= //************************************************************************************************* int main() { std::cout << " Running 'UDaLDb'..." << std::endl; using blazetest::mathtest::TypeA; using blazetest::mathtest::TypeB; try { // Matrix type definitions typedef blaze::UpperMatrix< blaze::DynamicMatrix<TypeA> > UDa; typedef blaze::LowerMatrix< blaze::DynamicMatrix<TypeB> > LDb; // Creator type definitions typedef blazetest::Creator<UDa> CUDa; typedef blazetest::Creator<LDb> CLDb; // Running tests with small matrices for( size_t i=0UL; i<=9UL; ++i ) { RUN_DMATDMATADD_OPERATION_TEST( CUDa( i ), CLDb( i ) ); } // Running tests with large matrices RUN_DMATDMATADD_OPERATION_TEST( CUDa( 67UL ), CLDb( 67UL ) ); RUN_DMATDMATADD_OPERATION_TEST( CUDa( 128UL ), CLDb( 128UL ) ); } catch( std::exception& ex ) { std::cerr << "\n\n ERROR DETECTED during dense matrix/dense matrix addition:\n" << ex.what() << "\n"; return EXIT_FAILURE; } return EXIT_SUCCESS; } //*************************************************************************************************
bsd-3-clause
agencja-acclaim/gulp-bower-webapp
wp-content/plugins/updraftplus/example-decrypt.php
1107
<?php // @codingStandardsIgnoreStart /* To dump the decrypted file using the given key on stdout, call: rijndael_decrypt_file('../path/to/file.crypt' , 'mykey'); Thus, here are the easy instructions: 1) Add a line like the above into this PHP file (not inside these comments, but outside) e.g. rijndael_decrypt_file('/home/myself/myfile.crypt' , 'MYKEY'); 2) Run this file (and make sure that includes/Rijndael.php is available, if you are moving this file around) e.g. php /home/myself/example-decrypt.php >output.sql.gz 3) You may then want to gunzip the resulting file to have a standard SQL file. e.g. gunzip output.sql.gz */ // @codingStandardsIgnoreEnd /** * An example of how to decrypt a file * * @param String $file Full path to file to decrypt * @param String $key Key or salting to be used */ function rijndael_decrypt_file($file, $key) { include_once(dirname(__FILE__).'/vendor/phpseclib/phpseclib/phpseclib/Crypt/Rijndael.php'); $rijndael = new Crypt_Rijndael(); $rijndael->setKey($key); $ciphertext = file_get_contents($file); print $rijndael->decrypt($ciphertext); }
mit
royvandewater/trading-post
vendor/github.com/lestrrat/go-jwx/internal/rsautil/rsautil.go
2161
package rsautil import ( "crypto/rsa" "encoding/json" "math/big" "github.com/lestrrat/go-jwx/buffer" ) type rawkey struct { N buffer.Buffer `json:"n"` E buffer.Buffer `json:"e"` D buffer.Buffer `json:"d"` P buffer.Buffer `json:"p"` Q buffer.Buffer `json:"q"` Dp buffer.Buffer `json:"dp"` Dq buffer.Buffer `json:"dq"` Qi buffer.Buffer `json:"qi"` } func NewRawKeyFromPublicKey(pubkey *rsa.PublicKey) *rawkey { r := &rawkey{} r.N = buffer.Buffer(pubkey.N.Bytes()) r.E = buffer.FromUint(uint64(pubkey.E)) return r } func NewRawKeyFromPrivateKey(privkey *rsa.PrivateKey) *rawkey { r := NewRawKeyFromPublicKey(&privkey.PublicKey) r.D = buffer.Buffer(privkey.D.Bytes()) r.P = buffer.Buffer(privkey.Primes[0].Bytes()) r.Q = buffer.Buffer(privkey.Primes[1].Bytes()) r.Dp = buffer.Buffer(privkey.Precomputed.Dp.Bytes()) r.Dq = buffer.Buffer(privkey.Precomputed.Dq.Bytes()) r.Qi = buffer.Buffer(privkey.Precomputed.Qinv.Bytes()) return r } func PublicKeyFromJSON(data []byte) (*rsa.PublicKey, error) { r := rawkey{} if err := json.Unmarshal(data, &r); err != nil { return nil, err } return r.GeneratePublicKey() } func PrivateKeyFromJSON(data []byte) (*rsa.PrivateKey, error) { r := rawkey{} if err := json.Unmarshal(data, &r); err != nil { return nil, err } return r.GeneratePrivateKey() } func (r rawkey) GeneratePublicKey() (*rsa.PublicKey, error) { return &rsa.PublicKey{ N: (&big.Int{}).SetBytes(r.N.Bytes()), E: int((&big.Int{}).SetBytes(r.E.Bytes()).Int64()), }, nil } func (r rawkey) GeneratePrivateKey() (*rsa.PrivateKey, error) { pubkey, err := r.GeneratePublicKey() if err != nil { return nil, err } privkey := &rsa.PrivateKey{ PublicKey: *pubkey, D: (&big.Int{}).SetBytes(r.D.Bytes()), Primes: []*big.Int{ (&big.Int{}).SetBytes(r.P.Bytes()), (&big.Int{}).SetBytes(r.Q.Bytes()), }, } if r.Dp.Len() > 0 { privkey.Precomputed.Dp = (&big.Int{}).SetBytes(r.Dp.Bytes()) } if r.Dq.Len() > 0 { privkey.Precomputed.Dq = (&big.Int{}).SetBytes(r.Dq.Bytes()) } if r.Qi.Len() > 0 { privkey.Precomputed.Qinv = 
(&big.Int{}).SetBytes(r.Qi.Bytes()) } return privkey, nil }
mit