repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
LiuCAs/Gekosale
plugin/Gekosale/Frontend/newsletterbox/controller/newsletterbox.php
3827
<?php /** * Gekosale, Open Source E-Commerce Solution * http://www.gekosale.pl * * Copyright (c) 2008-2012 Gekosale. Zabronione jest usuwanie informacji o licencji i autorach. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * * $Revision: 438 $ * $Author: gekosale $ * $Date: 2011-08-27 11:29:36 +0200 (So, 27 sie 2011) $ * $Id: newsletterbox.php 438 2011-08-27 09:29:36Z gekosale $ */ namespace Gekosale; use SimpleForm; class NewsletterBoxController extends Component\Controller\Box { public function index () { $param = $this->registry->core->getParam(); if (! empty($param) && $this->registry->router->getCurrentController() == 'newsletter'){ $linkActive = App::getModel('newsletter')->checkLinkToActivate($param); if ($linkActive > 0){ $change = App::getModel('newsletter')->changeNewsletterStatus($linkActive); $this->registry->template->assign('activelink', 1); } else{ $inactiveLink = App::getModel('newsletter')->checkInactiveNewsletter($param); if ($inactiveLink > 0){ App::getModel('newsletter')->deleteClientNewsletter($inactiveLink); $this->registry->template->assign('inactivelink', 1); } else{ $this->registry->template->assign('errlink', 1); } } } $form = new SimpleForm\Form(Array( 'name' => 'newsletter', 'action' => '', 'method' => 'post' )); $action = $form->AddChild(new SimpleForm\Elements\Radio(Array( 'name' => 'action', 'options' => Array( '1' => _('TXT_SIGNUP'), '2' => _('TXT_REMOVE_SUBSCRIPTION') ) ))); $form->AddChild(new SimpleForm\Elements\TextField(Array( 'name' => 'email', 'label' => _('TXT_EMAIL'), 'rules' => Array( new SimpleForm\Rules\Required(_('ERR_EMPTY_EMAIL')), new SimpleForm\Rules\Email(_('ERR_WRONG_EMAIL')) ) ))); $url = $this->registry->router->generate('frontend.conditions', true); $form->AddChild(new 
SimpleForm\Elements\Checkbox(Array( 'name' => 'confirmterms', 'label' => _('TXT_NEWSLETTER_ACCEPT') . ' <a href="' . $url . '" target="_blank">' . _('TXT_NEWSLETTER_CONDITIONS') . '</a>', 'rules' => Array( new SimpleForm\Rules\RequiredDependency(_('TXT_NEWSLETTER_ACCEPT_CONDITIONS'), $action, new SimpleForm\Conditions\Equals('1')) ), 'default' => 0 ))); $form->Populate(Array( 'action' => 1 )); if ($form->Validate()){ $formData = $form->getSubmitValues(); $this->model = App::getModel('newsletter'); if ($formData['action'] == 1){ $checkEmailExists = $this->model->checkEmailIfExists($formData['email']); if ($checkEmailExists > 0){ $this->registry->template->assign('signup_error', _('ERR_EMAIL_NOT_EXISTS')); } else{ $newId = $this->model->addClientAboutNewsletter($formData['email']); if ($newId > 0){ $this->model->updateNewsletterActiveLink($newId, $formData['email']); } $this->registry->template->assign('signup_success', _('TXT_RECEIVE_EMAIL_WITH_ACTIVE_LINK')); } } if ($formData['action'] == 2){ $checkEmailExists = $this->model->checkEmailIfExists($formData['email']); if ($checkEmailExists > 0){ $this->model->unsetClientAboutNewsletter($checkEmailExists, $formData['email']); $this->registry->template->assign('signup_success', _('TXT_RECEIVE_EMAIL_WITH_DEACTIVE_LINK')); } else{ $this->registry->template->assign('signup_error', _('ERR_EMAIL_NO_EXIST')); } } } $this->registry->template->assign('newsletter', $form->getForm()); return $this->registry->template->fetch($this->loadTemplate('index.tpl')); } }
lgpl-3.0
uschindler/elasticsearch
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java
1176
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; import java.time.ZoneId; /** * DateTimeFunctions that can be mapped as histogram. This means the dates order is maintained * Unfortunately this means only YEAR works since everything else changes the order */ public abstract class DateTimeHistogramFunction extends DateTimeFunction { DateTimeHistogramFunction(Source source, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { super(source, field, zoneId, extractor); } /** * used for aggregation (date histogram) */ public long fixedInterval() { return -1; } public String calendarInterval() { return null; } }
apache-2.0
zhangdian/solr4.6.0
lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/TrecDocParser.java
5106
package org.apache.lucene.benchmark.byTask.feeds; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Locale; import java.util.Map; /** * Parser for trec doc content, invoked on doc text excluding &lt;DOC&gt; and &lt;DOCNO&gt; * which are handled in TrecContentSource. Required to be stateless and hence thread safe. 
*/ public abstract class TrecDocParser { /** Types of trec parse paths, */ public enum ParsePathType { GOV2, FBIS, FT, FR94, LATIMES } /** trec parser type used for unknown extensions */ public static final ParsePathType DEFAULT_PATH_TYPE = ParsePathType.GOV2; static final Map<ParsePathType,TrecDocParser> pathType2parser = new HashMap<ParsePathType,TrecDocParser>(); static { pathType2parser.put(ParsePathType.GOV2, new TrecGov2Parser()); pathType2parser.put(ParsePathType.FBIS, new TrecFBISParser()); pathType2parser.put(ParsePathType.FR94, new TrecFR94Parser()); pathType2parser.put(ParsePathType.FT, new TrecFTParser()); pathType2parser.put(ParsePathType.LATIMES, new TrecLATimesParser()); } static final Map<String,ParsePathType> pathName2Type = new HashMap<String,ParsePathType>(); static { for (ParsePathType ppt : ParsePathType.values()) { pathName2Type.put(ppt.name().toUpperCase(Locale.ROOT),ppt); } } /** max length of walk up from file to its ancestors when looking for a known path type */ private static final int MAX_PATH_LENGTH = 10; /** * Compute the path type of a file by inspecting name of file and its parents */ public static ParsePathType pathType(File f) { int pathLength = 0; while (f != null && ++pathLength < MAX_PATH_LENGTH) { ParsePathType ppt = pathName2Type.get(f.getName().toUpperCase(Locale.ROOT)); if (ppt!=null) { return ppt; } f = f.getParentFile(); } return DEFAULT_PATH_TYPE; } /** * parse the text prepared in docBuf into a result DocData, * no synchronization is required. * @param docData reusable result * @param name name that should be set to the result * @param trecSrc calling trec content source * @param docBuf text to parse * @param pathType type of parsed file, or null if unknown - may be used by * parsers to alter their behavior according to the file path type. 
*/ public abstract DocData parse(DocData docData, String name, TrecContentSource trecSrc, StringBuilder docBuf, ParsePathType pathType) throws IOException; /** * strip tags from <code>buf</code>: each tag is replaced by a single blank. * @return text obtained when stripping all tags from <code>buf</code> (Input StringBuilder is unmodified). */ public static String stripTags(StringBuilder buf, int start) { return stripTags(buf.substring(start),0); } /** * strip tags from input. * @see #stripTags(StringBuilder, int) */ public static String stripTags(String buf, int start) { if (start>0) { buf = buf.substring(0); } return buf.replaceAll("<[^>]*>", " "); } /** * Extract from <code>buf</code> the text of interest within specified tags * @param buf entire input text * @param startTag tag marking start of text of interest * @param endTag tag marking end of text of interest * @param maxPos if &ge; 0 sets a limit on start of text of interest * @return text of interest or null if not found */ public static String extract(StringBuilder buf, String startTag, String endTag, int maxPos, String noisePrefixes[]) { int k1 = buf.indexOf(startTag); if (k1>=0 && (maxPos<0 || k1<maxPos)) { k1 += startTag.length(); int k2 = buf.indexOf(endTag,k1); if (k2>=0 && (maxPos<0 || k2<maxPos)) { // found end tag with allowed range if (noisePrefixes != null) { for (String noise : noisePrefixes) { int k1a = buf.indexOf(noise,k1); if (k1a>=0 && k1a<k2) { k1 = k1a + noise.length(); } } } return buf.substring(k1,k2).trim(); } } return null; } //public static void main(String[] args) { // System.out.println(stripTags("is it true that<space>2<<second space>><almost last space>1<one more space>?",0)); //} }
apache-2.0
meteorcloudy/bazel
src/test/java/com/google/devtools/build/lib/packages/util/MockGenruleSupport.java
1089
// Copyright 2020 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.packages.util; import java.io.IOException; /** * Creates mock BUILD files required for the genrule rule. */ public final class MockGenruleSupport { /** * Sets up mocking support for genrules. */ public static void setup(MockToolsConfig config) throws IOException { config.create( "tools/genrule/BUILD", "exports_files(['genrule-setup.sh'])"); config.create("tools/genrule/genrule-setup.sh"); } }
apache-2.0
huangpf/azure-sdk-for-net
src/Common/Handlers/ClientRequestTrackingHandler.cs
1492
// // Copyright (c) Microsoft. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // using System; using System.Net.Http; using System.Threading; namespace Microsoft.WindowsAzure { public class ClientRequestTrackingHandler : MessageProcessingHandler { public string TrackingId { get; private set; } public ClientRequestTrackingHandler(string trackingId) : base() { TrackingId = trackingId; } protected override HttpRequestMessage ProcessRequest(HttpRequestMessage request, CancellationToken cancellationToken) { request.Headers.Add("client-tracking-id", TrackingId); return request; } protected override HttpResponseMessage ProcessResponse(HttpResponseMessage response, CancellationToken cancellationToken) { response.Headers.Add("client-tracking-id", TrackingId); return response; } } }
apache-2.0
awslabs/aws-sdk-xamarin
AWS.XamarinSDK/AWSSDK_Android/Amazon.SecurityToken/Model/Internal/MarshallTransformations/AssumeRoleWithWebIdentityRequestMarshaller.cs
3141
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the sts-2011-06-15.normal.json service model. */ using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Text; using System.Xml.Serialization; using Amazon.SecurityToken.Model; using Amazon.Runtime; using Amazon.Runtime.Internal; using Amazon.Runtime.Internal.Transform; using Amazon.Runtime.Internal.Util; namespace Amazon.SecurityToken.Model.Internal.MarshallTransformations { /// <summary> /// AssumeRoleWithWebIdentity Request Marshaller /// </summary> public class AssumeRoleWithWebIdentityRequestMarshaller : IMarshaller<IRequest, AssumeRoleWithWebIdentityRequest> , IMarshaller<IRequest,AmazonWebServiceRequest> { public IRequest Marshall(AmazonWebServiceRequest input) { return this.Marshall((AssumeRoleWithWebIdentityRequest)input); } public IRequest Marshall(AssumeRoleWithWebIdentityRequest publicRequest) { IRequest request = new DefaultRequest(publicRequest, "Amazon.SecurityToken"); request.Parameters.Add("Action", "AssumeRoleWithWebIdentity"); request.Parameters.Add("Version", "2011-06-15"); if(publicRequest != null) { if(publicRequest.IsSetDurationSeconds()) { request.Parameters.Add("DurationSeconds", StringUtils.FromInt(publicRequest.DurationSeconds)); } if(publicRequest.IsSetPolicy()) { request.Parameters.Add("Policy", StringUtils.FromString(publicRequest.Policy)); } 
if(publicRequest.IsSetProviderId()) { request.Parameters.Add("ProviderId", StringUtils.FromString(publicRequest.ProviderId)); } if(publicRequest.IsSetRoleArn()) { request.Parameters.Add("RoleArn", StringUtils.FromString(publicRequest.RoleArn)); } if(publicRequest.IsSetRoleSessionName()) { request.Parameters.Add("RoleSessionName", StringUtils.FromString(publicRequest.RoleSessionName)); } if(publicRequest.IsSetWebIdentityToken()) { request.Parameters.Add("WebIdentityToken", StringUtils.FromString(publicRequest.WebIdentityToken)); } } return request; } } }
apache-2.0
GunoH/intellij-community
plugins/IntentionPowerPak/src/com/siyeh/ipp/initialization/SplitDeclarationAndInitializationIntention.java
5297
/* * Copyright 2003-2019 Dave Griffith, Bas Leijdekkers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.siyeh.ipp.initialization; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Condition; import com.intellij.openapi.util.TextRange; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.search.LocalSearchScope; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.refactoring.util.RefactoringUtil; import com.intellij.util.containers.ContainerUtil; import com.siyeh.ig.psiutils.HighlightUtils; import com.siyeh.ipp.base.Intention; import com.siyeh.ipp.base.PsiElementPredicate; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; public class SplitDeclarationAndInitializationIntention extends Intention { @Override @NotNull protected PsiElementPredicate getElementPredicate() { return new SplitDeclarationAndInitializationPredicate(); } @Override public boolean isAvailable(@NotNull Project project, Editor editor, @NotNull PsiElement element) { return PsiTreeUtil.getParentOfType(element, PsiField.class, false, PsiCodeBlock.class) != null && super.isAvailable(project, editor, element); } @Override protected void processIntention(@NotNull PsiElement element) { throw new UnsupportedOperationException("The only 
'processIntention(Editor, PsiElement)' is allowed to be invoked."); } @Override public void processIntention(Editor editor, @NotNull PsiElement element) { final PsiField field = (PsiField)element.getParent(); final PsiExpression initializer = field.getInitializer(); if (initializer == null) { return; } final String initializerText = RefactoringUtil.convertInitializerToNormalExpression(initializer, field.getType()).getText(); final PsiClass containingClass = field.getContainingClass(); if (containingClass == null) { return; } final boolean fieldIsStatic = field.hasModifierProperty(PsiModifier.STATIC); final PsiClassInitializer[] classInitializers = containingClass.getInitializers(); PsiClassInitializer classInitializer = null; final int fieldOffset = field.getTextOffset(); for (PsiClassInitializer existingClassInitializer : classInitializers) { final int initializerOffset = existingClassInitializer.getTextOffset(); if (initializerOffset <= fieldOffset) { continue; } final boolean initializerIsStatic = existingClassInitializer.hasModifierProperty(PsiModifier.STATIC); if (initializerIsStatic == fieldIsStatic) { Condition<PsiReference> usedBeforeInitializer = ref -> { PsiElement refElement = ref.getElement(); TextRange textRange = refElement.getTextRange(); return textRange == null || textRange.getStartOffset() < initializerOffset; }; if (!ContainerUtil .exists(ReferencesSearch.search(field, new LocalSearchScope(containingClass)).findAll(), usedBeforeInitializer)) { classInitializer = existingClassInitializer; break; } } } final PsiManager manager = field.getManager(); final Project project = manager.getProject(); final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(project); if (classInitializer == null) { if (PsiUtil.isJavaToken(PsiTreeUtil.skipWhitespacesForward(field), JavaTokenType.COMMA)) { field.normalizeDeclaration(); } classInitializer = (PsiClassInitializer)containingClass.addAfter(elementFactory.createClassInitializer(), field); // add 
some whitespace between the field and the class initializer final PsiElement whitespace = PsiParserFacade.SERVICE.getInstance(project).createWhiteSpaceFromText("\n"); containingClass.addAfter(whitespace, field); } final PsiCodeBlock body = classInitializer.getBody(); @NonNls final String initializationStatementText = field.getName() + " = " + initializerText + ';'; final PsiExpressionStatement statement = (PsiExpressionStatement)elementFactory.createStatementFromText(initializationStatementText, body); final PsiElement addedElement = body.addAfter(statement, null); if (fieldIsStatic) { final PsiModifierList modifierList = classInitializer.getModifierList(); if (modifierList != null) { modifierList.setModifierProperty(PsiModifier.STATIC, true); } } initializer.delete(); CodeStyleManager.getInstance(manager.getProject()).reformat(classInitializer); HighlightUtils.highlightElement(addedElement, editor); } }
apache-2.0
mgol/pdf.js
test/unit/font_spec.js
16033
/* -*- Mode: Java; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* vim: set shiftwidth=2 tabstop=2 autoindent cindent expandtab: */ /* globals expect, it, describe, CFFCompiler, CFFParser, CFFIndex, CFFStrings, SEAC_ANALYSIS_ENABLED:true, Type1Parser, StringStream */ 'use strict'; describe('font', function() { function hexDump(bytes) { var line = ''; for (var i = 0, ii = bytes.length; i < ii; ++i) { var b = bytes[i].toString(16); if (b.length < 2) { b = '0' + b; } line += b.toString(16); } return line; } // This example font comes from the CFF spec: // http://www.adobe.com/content/dam/Adobe/en/devnet/font/pdfs/5176.CFF.pdf var exampleFont = '0100040100010101134142434445462b' + '54696d65732d526f6d616e000101011f' + 'f81b00f81c02f81d03f819041c6f000d' + 'fb3cfb6efa7cfa1605e911b8f1120003' + '01010813183030312e30303754696d65' + '7320526f6d616e54696d657300000002' + '010102030e0e7d99f92a99fb7695f773' + '8b06f79a93fc7c8c077d99f85695f75e' + '9908fb6e8cf87393f7108b09a70adf0b' + 'f78e14'; var fontData = []; for (var i = 0; i < exampleFont.length; i += 2) { var hex = exampleFont.substr(i, 2); fontData.push(parseInt(hex, 16)); } var bytes = new Uint8Array(fontData); fontData = { getBytes: function() { return bytes; } }; function bytesToString(bytesArray) { var str = ''; for (var i = 0, ii = bytesArray.length; i < ii; i++) { str += String.fromCharCode(bytesArray[i]); } return str; } describe('CFFParser', function() { var parser = new CFFParser(fontData, {}); var cff = parser.parse(); it('parses header', function() { var header = cff.header; expect(header.major).toEqual(1); expect(header.minor).toEqual(0); expect(header.hdrSize).toEqual(4); expect(header.offSize).toEqual(1); }); it('parses name index', function() { var names = cff.names; expect(names.length).toEqual(1); expect(names[0]).toEqual('ABCDEF+Times-Roman'); }); it('sanitizes name index', function() { var index = new CFFIndex(); index.add(['['.charCodeAt(0), 'a'.charCodeAt(0)]); var names = 
parser.parseNameIndex(index); expect(names).toEqual(['_a']); index = new CFFIndex(); var longName = []; for (var i = 0; i < 129; i++) { longName.push(0); } index.add(longName); names = parser.parseNameIndex(index); expect(names[0].length).toEqual(127); }); it('parses string index', function() { var strings = cff.strings; expect(strings.count).toEqual(3); expect(strings.get(0)).toEqual('.notdef'); expect(strings.get(391)).toEqual('001.007'); }); it('parses top dict', function() { var topDict = cff.topDict; // 391 version 392 FullName 393 FamilyName 389 Weight 28416 UniqueID // -168 -218 1000 898 FontBBox 94 CharStrings 45 102 Private expect(topDict.getByName('version')).toEqual(391); expect(topDict.getByName('FullName')).toEqual(392); expect(topDict.getByName('FamilyName')).toEqual(393); expect(topDict.getByName('Weight')).toEqual(389); expect(topDict.getByName('UniqueID')).toEqual(28416); expect(topDict.getByName('FontBBox')).toEqual([-168, -218, 1000, 898]); expect(topDict.getByName('CharStrings')).toEqual(94); expect(topDict.getByName('Private')).toEqual([45, 102]); }); it('parses a CharString having cntrmask', function() { var bytes = new Uint8Array([0, 1, // count 1, // offsetSize 0, // offset[0] 38, // end 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 1, // hstem 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 3, // vstem 20, // cntrmask 22, 22, // fail if misparsed as hmoveto 14 // endchar ]); parser.bytes = bytes; var charStrings = parser.parseCharStrings(0).charStrings; expect(charStrings.count).toEqual(1); // shoudn't be sanitized expect(charStrings.get(0).length).toEqual(38); }); it('parses a CharString endchar with 4 args w/seac enabled', function() { var seacAnalysisState = SEAC_ANALYSIS_ENABLED; try { SEAC_ANALYSIS_ENABLED = true; var bytes = new Uint8Array([0, 1, // count 1, // offsetSize 0, // offset[0] 237, 247, 22, 247, 72, 204, 247, 86, 14]); parser.bytes = bytes; var result = 
parser.parseCharStrings(0); expect(result.charStrings.count).toEqual(1); expect(result.charStrings.get(0).length).toEqual(1); expect(result.seacs.length).toEqual(1); expect(result.seacs[0].length).toEqual(4); expect(result.seacs[0][0]).toEqual(130); expect(result.seacs[0][1]).toEqual(180); expect(result.seacs[0][2]).toEqual(65); expect(result.seacs[0][3]).toEqual(194); } finally { SEAC_ANALYSIS_ENABLED = seacAnalysisState; } }); it('parses a CharString endchar with 4 args w/seac disabled', function() { var seacAnalysisState = SEAC_ANALYSIS_ENABLED; try { SEAC_ANALYSIS_ENABLED = false; var bytes = new Uint8Array([0, 1, // count 1, // offsetSize 0, // offset[0] 237, 247, 22, 247, 72, 204, 247, 86, 14]); parser.bytes = bytes; var result = parser.parseCharStrings(0); expect(result.charStrings.count).toEqual(1); expect(result.charStrings.get(0).length).toEqual(9); expect(result.seacs.length).toEqual(0); } finally { SEAC_ANALYSIS_ENABLED = seacAnalysisState; } }); it('parses a CharString endchar no args', function() { var bytes = new Uint8Array([0, 1, // count 1, // offsetSize 0, // offset[0] 14]); parser.bytes = bytes; var result = parser.parseCharStrings(0); expect(result.charStrings.count).toEqual(1); expect(result.charStrings.get(0)[0]).toEqual(14); expect(result.seacs.length).toEqual(0); }); it('parses predefined charsets', function() { var charset = parser.parseCharsets(0, 0, null, true); expect(charset.predefined).toEqual(true); }); it('parses charset format 0', function() { // The first three bytes make the offset large enough to skip predefined. 
var bytes = new Uint8Array([0x00, 0x00, 0x00, 0x00, // format 0x00, 0x02 // sid/cid ]); parser.bytes = bytes; var charset = parser.parseCharsets(3, 2, new CFFStrings(), false); expect(charset.charset[1]).toEqual('exclam'); // CID font charset = parser.parseCharsets(3, 2, new CFFStrings(), true); expect(charset.charset[1]).toEqual(2); }); it('parses charset format 1', function() { // The first three bytes make the offset large enough to skip predefined. var bytes = new Uint8Array([0x00, 0x00, 0x00, 0x01, // format 0x00, 0x08, // sid/cid start 0x01 // sid/cid left ]); parser.bytes = bytes; var charset = parser.parseCharsets(3, 2, new CFFStrings(), false); expect(charset.charset).toEqual(['.notdef', 'quoteright', 'parenleft']); // CID font charset = parser.parseCharsets(3, 2, new CFFStrings(), true); expect(charset.charset).toEqual(['.notdef', 8, 9]); }); it('parses charset format 2', function() { // format 2 is the same as format 1 but the left is card16 // The first three bytes make the offset large enough to skip predefined. var bytes = new Uint8Array([0x00, 0x00, 0x00, 0x02, // format 0x00, 0x08, // sid/cid start 0x00, 0x01 // sid/cid left ]); parser.bytes = bytes; var charset = parser.parseCharsets(3, 2, new CFFStrings(), false); expect(charset.charset).toEqual(['.notdef', 'quoteright', 'parenleft']); // CID font charset = parser.parseCharsets(3, 2, new CFFStrings(), true); expect(charset.charset).toEqual(['.notdef', 8, 9]); }); it('parses encoding format 0', function() { // The first two bytes make the offset large enough to skip predefined. var bytes = new Uint8Array([0x00, 0x00, 0x00, // format 0x01, // count 0x08 // start ]); parser.bytes = bytes; var encoding = parser.parseEncoding(2, {}, new CFFStrings(), null); expect(encoding.encoding).toEqual({0x8: 1}); }); it('parses encoding format 1', function() { // The first two bytes make the offset large enough to skip predefined. 
var bytes = new Uint8Array([0x00, 0x00, 0x01, // format 0x01, // num ranges 0x07, // range1 start 0x01 // range2 left ]); parser.bytes = bytes; var encoding = parser.parseEncoding(2, {}, new CFFStrings(), null); expect(encoding.encoding).toEqual({0x7: 0x01, 0x08: 0x02}); }); it('parses fdselect format 0', function() { var bytes = new Uint8Array([0x00, // format 0x00, // gid: 0 fd: 0 0x01 // gid: 1 fd: 1 ]); parser.bytes = bytes; var fdSelect = parser.parseFDSelect(0, 2); expect(fdSelect.fdSelect).toEqual([0, 1]); }); it('parses fdselect format 3', function() { var bytes = new Uint8Array([0x03, // format 0x00, 0x02, // range count 0x00, 0x00, // first gid 0x09, // font dict 1 id 0x00, 0x02, // nex gid 0x0a, // font dict 2 gid 0x00, 0x04 // sentinel (last gid) ]); parser.bytes = bytes; var fdSelect = parser.parseFDSelect(0, 2); expect(fdSelect.fdSelect).toEqual([9, 9, 0xa, 0xa]); }); // TODO fdArray }); describe('CFFCompiler', function() { it('encodes integers', function() { var c = new CFFCompiler(); // all the examples from the spec expect(c.encodeInteger(0)).toEqual([0x8b]); expect(c.encodeInteger(100)).toEqual([0xef]); expect(c.encodeInteger(-100)).toEqual([0x27]); expect(c.encodeInteger(1000)).toEqual([0xfa, 0x7c]); expect(c.encodeInteger(-1000)).toEqual([0xfe, 0x7c]); expect(c.encodeInteger(10000)).toEqual([0x1c, 0x27, 0x10]); expect(c.encodeInteger(-10000)).toEqual([0x1c, 0xd8, 0xf0]); expect(c.encodeInteger(100000)).toEqual([0x1d, 0x00, 0x01, 0x86, 0xa0]); expect(c.encodeInteger(-100000)).toEqual([0x1d, 0xff, 0xfe, 0x79, 0x60]); }); it('encodes floats', function() { var c = new CFFCompiler(); expect(c.encodeFloat(-2.25)).toEqual([0x1e, 0xe2, 0xa2, 0x5f]); expect(c.encodeFloat(5e-11)).toEqual([0x1e, 0x5c, 0x11, 0xff]); }); // TODO a lot more compiler tests }); describe('Type1Parser', function() { it('splits tokens', function() { var stream = new StringStream('/BlueValues[-17 0]noaccess def'); var parser = new Type1Parser(stream); 
expect(parser.getToken()).toEqual('/'); expect(parser.getToken()).toEqual('BlueValues'); expect(parser.getToken()).toEqual('['); expect(parser.getToken()).toEqual('-17'); expect(parser.getToken()).toEqual('0'); expect(parser.getToken()).toEqual(']'); expect(parser.getToken()).toEqual('noaccess'); expect(parser.getToken()).toEqual('def'); expect(parser.getToken()).toEqual(null); }); it('handles glued tokens', function() { var stream = new StringStream('dup/CharStrings'); var parser = new Type1Parser(stream); expect(parser.getToken()).toEqual('dup'); expect(parser.getToken()).toEqual('/'); expect(parser.getToken()).toEqual('CharStrings'); }); it('ignores whitespace', function() { var stream = new StringStream('\nab c\t'); var parser = new Type1Parser(stream); expect(parser.getToken()).toEqual('ab'); expect(parser.getToken()).toEqual('c'); }); it('parses numbers', function() { var stream = new StringStream('123'); var parser = new Type1Parser(stream); expect(parser.readNumber()).toEqual(123); }); it('parses booleans', function() { var stream = new StringStream('true false'); var parser = new Type1Parser(stream); expect(parser.readBoolean()).toEqual(1); expect(parser.readBoolean()).toEqual(0); }); it('parses number arrays', function() { var stream = new StringStream('[1 2]'); var parser = new Type1Parser(stream); expect(parser.readNumberArray()).toEqual([1, 2]); // Variation on spacing. 
stream = new StringStream('[ 1 2 ]'); parser = new Type1Parser(stream); expect(parser.readNumberArray()).toEqual([1, 2]); }); it('skips comments', function() { var stream = new StringStream( '%!PS-AdobeFont-1.0: CMSY10 003.002\n' + '%%Title: CMSY10\n' + '%Version: 003.002\n' + 'FontDirectory'); var parser = new Type1Parser(stream); expect(parser.getToken()).toEqual('FontDirectory'); }); it('parses font program', function() { var stream = new StringStream( '/ExpansionFactor 99\n' + '/Subrs 1 array\n' + 'dup 0 1 RD x noaccess put\n'+ 'end\n' + '/CharStrings 46 dict dup begin\n' + '/.notdef 1 RD x ND' + '\n' + 'end'); var parser = new Type1Parser(stream); var program = parser.extractFontProgram(); expect(program.charstrings.length).toEqual(1); expect(program.properties.privateData.ExpansionFactor).toEqual(99); }); it('parses font header font matrix', function() { var stream = new StringStream( '/FontMatrix [0.001 0 0 0.001 0 0 ]readonly def\n'); var parser = new Type1Parser(stream); var props = {}; var program = parser.extractFontHeader(props); expect(props.fontMatrix).toEqual([0.001, 0, 0, 0.001, 0, 0]); }); it('parses font header encoding', function() { var stream = new StringStream( '/Encoding 256 array\n' + '0 1 255 {1 index exch /.notdef put} for\n' + 'dup 33 /arrowright put\n' + 'readonly def\n'); var parser = new Type1Parser(stream); var props = { overridableEncoding: true }; var program = parser.extractFontHeader(props); expect(props.builtInEncoding[33]).toEqual('arrowright'); }); }); });
apache-2.0
itmustbejj/chef-server
oc-chef-pedant/spec/api/knife/roles/from_file_spec.rb
1384
# Copyright: Copyright (c) 2012 Opscode, Inc. # License: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'pedant/rspec/knife_util' require 'securerandom' describe 'knife', :knife do context 'role' do context 'from file ROLE' do include Pedant::RSpec::KnifeUtil include Pedant::RSpec::KnifeUtil::Role let(:command) { "knife role from file #{role_name}.json -c #{knife_config}" } after(:each) { knife "role delete #{role_name} -c #{knife_config} --yes" } context 'with existing role' do context 'as an admin' do let(:requestor) { knife_admin } it 'should succeed' do assume_fixture_file! # Runs knife role from file should have_outcome :status => 0, :stderr => /Updated Role\s+#{role_name}/ end end end end end end
apache-2.0
rosogon/SeaCloudsPlatform
sla/sla-core/sla-tools/src/main/java/eu/atos/sla/parser/data/MessageResponse.java
1734
/** * Copyright 2014 Atos * Contact: Atos <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.atos.sla.parser.data; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; /** * A POJO Object that stores all the information from a Agreement * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlRootElement(name = "message") public class MessageResponse { @XmlAttribute(name = "code") private int code; @XmlAttribute(name = "message") private String message; @XmlAttribute(name = "elementId") private String elementId; public MessageResponse() { } public int getCode() { return code; } public void setCode(int code) { this.code = code; } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } public String getElementId() { return elementId; } public void setElementId(String elementId) { this.elementId = elementId; } }
apache-2.0
denzelsN/pinpoint
web/src/main/java/com/navercorp/pinpoint/web/view/LinkHistogramSummarySerializer.java
1789
/* * Copyright 2017 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.web.view; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.SerializerProvider; import com.navercorp.pinpoint.web.applicationmap.histogram.Histogram; import com.navercorp.pinpoint.web.applicationmap.link.LinkHistogramSummary; import java.io.IOException; import java.util.List; /** * @author HyunGil Jeong */ public class LinkHistogramSummarySerializer extends JsonSerializer<LinkHistogramSummary> { @Override public void serialize(LinkHistogramSummary summary, JsonGenerator jgen, SerializerProvider serializers) throws IOException, JsonProcessingException { jgen.writeStartObject(); String key = summary.getLinkName(); jgen.writeStringField("key", key); Histogram histogram = summary.getHistogram(); jgen.writeObjectField("histogram", histogram); List<ResponseTimeViewModel> timeSeriesHistogram = summary.getTimeSeriesHistogram(); jgen.writeObjectField("timeSeriesHistogram", timeSeriesHistogram); jgen.writeEndObject(); } }
apache-2.0
CesarPantoja/jena
jena-base/src/main/java/org/apache/jena/atlas/io/IO.java
11158
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.atlas.io; import java.io.* ; import java.nio.charset.Charset ; import java.nio.charset.StandardCharsets ; import java.util.zip.GZIPInputStream ; import java.util.zip.GZIPOutputStream ; import org.apache.jena.atlas.RuntimeIOException ; import org.apache.jena.atlas.lib.IRILib ; public class IO { public static final int EOF = -1 ; public static final int UNSET = -2 ; private static Charset utf8 = StandardCharsets.UTF_8 ; private static Charset ascii = StandardCharsets.US_ASCII ; /** Open an input stream to a file. * If the filename is null or "-", return System.in * If the filename ends in .gz, wrap in GZIPInputStream */ static public InputStream openFile(String filename) { try { return openFileEx(filename) ; } catch (IOException ex) { IO.exception(ex); return null ; } } /** Open an input stream to a file; do not mask IOExceptions. 
* If the filename is null or "-", return System.in * If the filename ends in .gz, wrap in GZIPInputStream * @param filename * @throws FileNotFoundException * @throws IOException */ static public InputStream openFileEx(String filename) throws IOException, FileNotFoundException { if ( filename == null || filename.equals("-") ) return System.in ; if ( filename.startsWith("file:") ) { filename = filename.substring("file:".length()) ; filename = IRILib.decode(filename) ; } InputStream in = new FileInputStream(filename) ; if ( filename.endsWith(".gz") ) in = new GZIPInputStream(in) ; return in ; } /** Open a UTF8 Reader for a file. * If the filename is null or "-", use System.in * If the filename ends in .gz, use GZIPInputStream */ static public Reader openFileUTF8(String filename) { return openFileReader(filename, utf8) ; } /** Open an ASCII Reader for a file. * If the filename is null or "-", use System.in * If the filename ends in .gz, use GZIPInputStream */ static public Reader openFileASCII(String filename) { return openFileReader(filename, ascii) ; } private static Reader openFileReader(String filename, Charset charset) { InputStream in = openFile(filename) ; return new InputStreamReader(in, charset) ; } /** Create an unbuffered reader that uses UTF-8 encoding */ static public Reader asUTF8(InputStream in) { return new InputStreamReader(in, utf8.newDecoder()); } /** Create a unbuffered reader that uses ASCII encoding */ static public Reader asASCII(InputStream in) { return new InputStreamReader(in, ascii.newDecoder()); } /** Create an buffered reader that uses UTF-8 encoding */ static public BufferedReader asBufferedUTF8(InputStream in) { return new BufferedReader(asUTF8(in)) ; } /** Create a writer that uses UTF-8 encoding */ static public Writer asUTF8(OutputStream out) { return new OutputStreamWriter(out, utf8.newEncoder()); } /** Create a writer that uses ASCII encoding */ static public Writer asASCII(OutputStream out) { return new OutputStreamWriter(out, 
ascii.newEncoder()); } /** Create a writer that uses UTF-8 encoding and is buffered. */ static public Writer asBufferedUTF8(OutputStream out) { Writer w = new OutputStreamWriter(out, utf8.newEncoder()); return new BufferingWriter(w) ; } /** Open a file for output - may include adding gzip processing. */ static public OutputStream openOutputFile(String filename) { try { return openOutputFileEx(filename) ; } catch (IOException ex) { IO.exception(ex) ; return null ; } } /** Open an input stream to a file; do not mask IOExceptions. * If the filename ends in .gz, wrap in GZIPOutputStream * @param filename * @throws FileNotFoundException If the output can't be opened. * @throws IOException for bad gzip encoded data */ static public OutputStream openOutputFileEx(String filename) throws FileNotFoundException,IOException { if ( filename == null || filename.equals("-") ) return System.out ; if ( filename.startsWith("file:") ) { filename = filename.substring("file:".length()) ; filename = IRILib.decode(filename) ; } OutputStream out = new FileOutputStream(filename) ; if ( filename.endsWith(".gz") ) out = new GZIPOutputStream(out) ; return out ; } /** Wrap in a general writer interface */ static public AWriter wrap(Writer w) { return Writer2.wrap(w) ; } /** Wrap in a general writer interface */ static public AWriter wrapUTF8(OutputStream out) { return wrap(asUTF8(out)) ; } /** Wrap in a general writer interface */ static public AWriter wrapASCII(OutputStream out) { return wrap(asASCII(out)) ; } /** Create a print writer that uses UTF-8 encoding */ static public PrintWriter asPrintWriterUTF8(OutputStream out) { return new PrintWriter(asUTF8(out)); } public static void close(org.apache.jena.atlas.lib.Closeable resource) { resource.close() ; } public static void closeSilent(org.apache.jena.atlas.lib.Closeable resource) { try { resource.close(); } catch (Exception ex) { } } public static void close(java.io.Closeable resource) { if ( resource == null ) return ; try { 
resource.close(); } catch (IOException ex) { exception(ex) ; } } public static void closeSilent(java.io.Closeable resource) { if ( resource == null ) return ; try { resource.close(); } catch (IOException ex) { } } public static void close(AWriter resource) { if ( resource == null ) return ; resource.close(); } public static void closeSilent(AWriter resource) { if ( resource == null ) return ; try { resource.close(); } catch (Exception ex) { } } public static void close(IndentedWriter resource) { if ( resource == null ) return ; resource.close(); } public static void closeSilent(IndentedWriter resource) { if ( resource == null ) return ; try { resource.close(); } catch (Exception ex) { } } /** Throw a RuntimeIOException - this function is guaraentted not to return normally */ public static void exception(String message) { throw new RuntimeIOException(message) ; } /** Throw a RuntimeIOException - this function is guaraentted not to return normally */ public static void exception(IOException ex) { throw new RuntimeIOException(ex) ; } /** Throw a RuntimeIOException - this function is guaraentted not to return normally */ public static void exception(String msg, IOException ex) { throw new RuntimeIOException(msg, ex) ; } public static void flush(OutputStream out) { if ( out == null ) return ; try { out.flush(); } catch (IOException ex) { exception(ex) ; } } public static void flush(Writer out) { if ( out == null ) return ; try { out.flush(); } catch (IOException ex) { exception(ex) ; } } public static void flush(AWriter out) { if ( out == null ) return ; out.flush(); } private static final int BUFFER_SIZE = 32*1024 ; public static byte[] readWholeFile(InputStream in) { try(ByteArrayOutputStream out = new ByteArrayOutputStream(BUFFER_SIZE)) { byte buff[] = new byte[BUFFER_SIZE] ; while (true) { int l = in.read(buff) ; if ( l <= 0 ) break ; out.write(buff, 0, l) ; } return out.toByteArray() ; } catch (IOException ex) { exception(ex) ; return null ; } } /** Read a whole 
file as UTF-8 * @param filename * @return String * @throws IOException */ public static String readWholeFileAsUTF8(String filename) throws IOException { try ( InputStream in = new FileInputStream(filename) ) { return readWholeFileAsUTF8(in) ; } } /** Read a whole stream as UTF-8 * * @param in InputStream to be read * @return String * @throws IOException */ public static String readWholeFileAsUTF8(InputStream in) throws IOException { // Don't buffer - we're going to read in large chunks anyway try ( Reader r = asUTF8(in) ) { return readWholeFileAsUTF8(r) ; } } /** Read a whole file as UTF-8 * * @param r * @return String The whole file * @throws IOException */ // Private worker as we are trying to force UTF-8. private static String readWholeFileAsUTF8(Reader r) throws IOException { try(StringWriter sw = new StringWriter(BUFFER_SIZE)) { char buff[] = new char[BUFFER_SIZE]; for (;;) { int l = r.read(buff); if (l < 0) break; sw.write(buff, 0, l); } return sw.toString(); } } public static String uniqueFilename(String directory, String base, String ext) { File d = new File(directory) ; if ( !d.exists() ) throw new IllegalArgumentException("Not found: " + directory) ; try { String fn0 = d.getCanonicalPath() + File.separator + base ; String fn = fn0 ; int x = 1 ; while (true) { if ( ext != null ) fn = fn + "."+ext ; File f = new File(fn) ; if ( ! f.exists() ) return fn ; fn = fn0 + "-" + (x++) ; } } catch (IOException e) { IO.exception(e) ; return null ; } } }
apache-2.0
njuneau/maven
maven-resolver-provider/src/test/java/org/apache/maven/repository/internal/DefaultModelResolverTest.java
8097
package org.apache.maven.repository.internal; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.net.MalformedURLException; import java.util.Arrays; import org.apache.maven.model.Dependency; import org.apache.maven.model.Parent; import org.apache.maven.model.resolution.ModelResolver; import org.apache.maven.model.resolution.UnresolvableModelException; import org.codehaus.plexus.component.repository.exception.ComponentLookupException; import org.eclipse.aether.impl.ArtifactResolver; import org.eclipse.aether.impl.RemoteRepositoryManager; import org.eclipse.aether.impl.VersionRangeResolver; /** * Test cases for the default {@code ModelResolver} implementation. * * @author Christian Schulte * @since 3.5.0 */ public final class DefaultModelResolverTest extends AbstractRepositoryTestCase { /** * Creates a new {@code DefaultModelResolverTest} instance. */ public DefaultModelResolverTest() { super(); } public void testResolveParentThrowsUnresolvableModelExceptionWhenNotFound() throws Exception { final Parent parent = new Parent(); parent.setGroupId( "ut.simple" ); parent.setArtifactId( "artifact" ); parent.setVersion( "0" ); try { this.newModelResolver().resolveModel( parent ); fail( "Expected 'UnresolvableModelException' not thrown." 
); } catch ( final UnresolvableModelException e ) { assertNotNull( e.getMessage() ); assertTrue( e.getMessage().startsWith( "Could not find artifact ut.simple:artifact:pom:0 in repo" ) ); } } public void testResolveParentThrowsUnresolvableModelExceptionWhenNoMatchingVersionFound() throws Exception { final Parent parent = new Parent(); parent.setGroupId( "ut.simple" ); parent.setArtifactId( "artifact" ); parent.setVersion( "[2.0,2.1)" ); try { this.newModelResolver().resolveModel( parent ); fail( "Expected 'UnresolvableModelException' not thrown." ); } catch ( final UnresolvableModelException e ) { assertEquals( "No versions matched the requested parent version range '[2.0,2.1)'", e.getMessage() ); } } public void testResolveParentThrowsUnresolvableModelExceptionWhenUsingRangesWithoutUpperBound() throws Exception { final Parent parent = new Parent(); parent.setGroupId( "ut.simple" ); parent.setArtifactId( "artifact" ); parent.setVersion( "[1.0,)" ); try { this.newModelResolver().resolveModel( parent ); fail( "Expected 'UnresolvableModelException' not thrown." 
); } catch ( final UnresolvableModelException e ) { assertEquals( "The requested parent version range '[1.0,)' does not specify an upper bound", e.getMessage() ); } } public void testResolveParentSuccessfullyResolvesExistingParentWithoutRange() throws Exception { final Parent parent = new Parent(); parent.setGroupId( "ut.simple" ); parent.setArtifactId( "artifact" ); parent.setVersion( "1.0" ); assertNotNull( this.newModelResolver().resolveModel( parent ) ); assertEquals( "1.0", parent.getVersion() ); } public void testResolveParentSuccessfullyResolvesExistingParentUsingHighestVersion() throws Exception { final Parent parent = new Parent(); parent.setGroupId( "ut.simple" ); parent.setArtifactId( "artifact" ); parent.setVersion( "(,2.0)" ); assertNotNull( this.newModelResolver().resolveModel( parent ) ); assertEquals( "1.0", parent.getVersion() ); } public void testResolveDependencyThrowsUnresolvableModelExceptionWhenNotFound() throws Exception { final Dependency dependency = new Dependency(); dependency.setGroupId( "ut.simple" ); dependency.setArtifactId( "artifact" ); dependency.setVersion( "0" ); try { this.newModelResolver().resolveModel( dependency ); fail( "Expected 'UnresolvableModelException' not thrown." ); } catch ( final UnresolvableModelException e ) { assertNotNull( e.getMessage() ); assertTrue( e.getMessage().startsWith( "Could not find artifact ut.simple:artifact:pom:0 in repo" ) ); } } public void testResolveDependencyThrowsUnresolvableModelExceptionWhenNoMatchingVersionFound() throws Exception { final Dependency dependency = new Dependency(); dependency.setGroupId( "ut.simple" ); dependency.setArtifactId( "artifact" ); dependency.setVersion( "[2.0,2.1)" ); try { this.newModelResolver().resolveModel( dependency ); fail( "Expected 'UnresolvableModelException' not thrown." 
); } catch ( final UnresolvableModelException e ) { assertEquals( "No versions matched the requested dependency version range '[2.0,2.1)'", e.getMessage() ); } } public void testResolveDependencyThrowsUnresolvableModelExceptionWhenUsingRangesWithoutUpperBound() throws Exception { final Dependency dependency = new Dependency(); dependency.setGroupId( "ut.simple" ); dependency.setArtifactId( "artifact" ); dependency.setVersion( "[1.0,)" ); try { this.newModelResolver().resolveModel( dependency ); fail( "Expected 'UnresolvableModelException' not thrown." ); } catch ( final UnresolvableModelException e ) { assertEquals( "The requested dependency version range '[1.0,)' does not specify an upper bound", e.getMessage() ); } } public void testResolveDependencySuccessfullyResolvesExistingDependencyWithoutRange() throws Exception { final Dependency dependency = new Dependency(); dependency.setGroupId( "ut.simple" ); dependency.setArtifactId( "artifact" ); dependency.setVersion( "1.0" ); assertNotNull( this.newModelResolver().resolveModel( dependency ) ); assertEquals( "1.0", dependency.getVersion() ); } public void testResolveDependencySuccessfullyResolvesExistingDependencyUsingHighestVersion() throws Exception { final Dependency dependency = new Dependency(); dependency.setGroupId( "ut.simple" ); dependency.setArtifactId( "artifact" ); dependency.setVersion( "(,2.0)" ); assertNotNull( this.newModelResolver().resolveModel( dependency ) ); assertEquals( "1.0", dependency.getVersion() ); } private ModelResolver newModelResolver() throws ComponentLookupException, MalformedURLException { return new DefaultModelResolver( this.session, null, this.getClass().getName(), lookup( ArtifactResolver.class ), lookup( VersionRangeResolver.class ), lookup( RemoteRepositoryManager.class ), Arrays.asList( newTestRepository() ) ); } }
apache-2.0
ibinti/intellij-community
java/idea-ui/src/com/intellij/facet/impl/ui/libraries/LibraryDownloadSettings.java
6383
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.facet.impl.ui.libraries; import com.intellij.framework.library.DownloadableLibraryFileDescription; import com.intellij.framework.library.DownloadableLibraryType; import com.intellij.framework.library.FrameworkLibraryVersion; import com.intellij.framework.library.LibraryVersionProperties; import com.intellij.openapi.roots.JavadocOrderRootType; import com.intellij.openapi.roots.OrderRootType; import com.intellij.openapi.roots.ui.configuration.libraryEditor.NewLibraryEditor; import com.intellij.openapi.roots.ui.configuration.projectRoot.LibrariesContainer; import com.intellij.openapi.util.Condition; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.download.DownloadableFileDescription; import com.intellij.util.download.DownloadableFileService; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * @author nik */ public class LibraryDownloadSettings { private final FrameworkLibraryVersion myVersion; private final DownloadableLibraryType myLibraryType; private String myLibrariesPath; private final String myLibraryName; private final boolean 
myDownloadSources; private final boolean myDownloadJavaDocs; private final LibrariesContainer.LibraryLevel myLibraryLevel; private final List<? extends DownloadableLibraryFileDescription> mySelectedDownloads; public LibraryDownloadSettings(@NotNull FrameworkLibraryVersion libraryVersion, @Nullable DownloadableLibraryType libraryType, final LibrariesContainer.LibraryLevel libraryLevel, final String downloadedLibrariesPath) { this(libraryVersion, libraryType, downloadedLibrariesPath, libraryVersion.getDefaultLibraryName(), libraryLevel, getRequiredFiles(libraryVersion.getFiles()), true, true); } public LibraryDownloadSettings(@NotNull FrameworkLibraryVersion libraryVersion, @Nullable DownloadableLibraryType libraryType, @NotNull String librariesPath, @NotNull String libraryName, @NotNull LibrariesContainer.LibraryLevel libraryLevel, @NotNull List<? extends DownloadableLibraryFileDescription> selectedDownloads, boolean downloadSources, boolean downloadJavaDocs) { myVersion = libraryVersion; myLibraryType = libraryType; myLibrariesPath = librariesPath; myLibraryName = libraryName; myDownloadSources = downloadSources; myDownloadJavaDocs = downloadJavaDocs; myLibraryLevel = libraryLevel; mySelectedDownloads = selectedDownloads; } private static List<? extends DownloadableLibraryFileDescription> getRequiredFiles(List<? extends DownloadableLibraryFileDescription> files) { return ContainerUtil.filter(files, (Condition<DownloadableLibraryFileDescription>)description -> !description.isOptional()); } @NotNull public FrameworkLibraryVersion getVersion() { return myVersion; } public boolean isDownloadJavaDocs() { return myDownloadJavaDocs; } public boolean isDownloadSources() { return myDownloadSources; } public String getLibraryName() { return myLibraryName; } public String getDirectoryForDownloadedLibrariesPath() { return myLibrariesPath; } public List<? 
extends DownloadableLibraryFileDescription> getSelectedDownloads() { return mySelectedDownloads; } @NotNull public LibrariesContainer.LibraryLevel getLibraryLevel() { return myLibraryLevel; } public DownloadableLibraryType getLibraryType() { return myLibraryType; } @Nullable public NewLibraryEditor download(JComponent parent, @Nullable String rootPath) { final List<DownloadableFileDescription> toDownload = new ArrayList<>(mySelectedDownloads); Map<DownloadableFileDescription, OrderRootType> rootTypes = new HashMap<>(); for (DownloadableLibraryFileDescription description : mySelectedDownloads) { final DownloadableFileDescription sources = description.getSourcesDescription(); if (myDownloadSources && sources != null) { toDownload.add(sources); rootTypes.put(sources, OrderRootType.SOURCES); } final DownloadableFileDescription docs = description.getDocumentationDescription(); if (myDownloadJavaDocs && docs != null) { toDownload.add(docs); rootTypes.put(docs, JavadocOrderRootType.getInstance()); } } String path = rootPath != null && !FileUtil.isAbsolute(myLibrariesPath) ? new File(rootPath, myLibrariesPath).getPath() : myLibrariesPath; List<Pair<VirtualFile,DownloadableFileDescription>> downloaded = DownloadableFileService.getInstance() .createDownloader(toDownload, myLibraryName + " Library") .downloadWithProgress(path, null, parent); if (downloaded == null) { return null; } final NewLibraryEditor libraryEditor; if (myLibraryType != null) { libraryEditor = new NewLibraryEditor(myLibraryType, new LibraryVersionProperties(myVersion.getVersionString())); } else { libraryEditor = new NewLibraryEditor(); } libraryEditor.setName(myLibraryName); for (Pair<VirtualFile, DownloadableFileDescription> pair : downloaded) { final OrderRootType rootType = rootTypes.containsKey(pair.getSecond()) ? rootTypes.get(pair.getSecond()) : OrderRootType.CLASSES; libraryEditor.addRoot(pair.getFirst(), rootType); } return libraryEditor; } }
apache-2.0
swizzley/origin
pkg/authorization/registry/policy/strategy.go
2681
package policy import ( "fmt" kapi "github.com/GoogleCloudPlatform/kubernetes/pkg/api" "github.com/GoogleCloudPlatform/kubernetes/pkg/fields" "github.com/GoogleCloudPlatform/kubernetes/pkg/labels" "github.com/GoogleCloudPlatform/kubernetes/pkg/registry/generic" "github.com/GoogleCloudPlatform/kubernetes/pkg/runtime" "github.com/GoogleCloudPlatform/kubernetes/pkg/util/fielderrors" authorizationapi "github.com/openshift/origin/pkg/authorization/api" "github.com/openshift/origin/pkg/authorization/api/validation" ) // strategy implements behavior for nodes type strategy struct { runtime.ObjectTyper } // Strategy is the default logic that applies when creating and updating Policy objects. var Strategy = strategy{kapi.Scheme} // NamespaceScoped is true for policies. func (strategy) NamespaceScoped() bool { return true } // AllowCreateOnUpdate is false for policies. func (strategy) AllowCreateOnUpdate() bool { return false } func (strategy) AllowUnconditionalUpdate() bool { return false } func (strategy) GenerateName(base string) string { return base } // PrepareForCreate clears fields that are not allowed to be set by end users on creation. func (strategy) PrepareForCreate(obj runtime.Object) { policy := obj.(*authorizationapi.Policy) policy.Name = authorizationapi.PolicyName } // PrepareForUpdate clears fields that are not allowed to be set by end users on update. func (strategy) PrepareForUpdate(obj, old runtime.Object) { _ = obj.(*authorizationapi.Policy) } // Validate validates a new policy. func (strategy) Validate(ctx kapi.Context, obj runtime.Object) fielderrors.ValidationErrorList { return validation.ValidateLocalPolicy(obj.(*authorizationapi.Policy)) } // ValidateUpdate is the default update validation for an end user. 
func (strategy) ValidateUpdate(ctx kapi.Context, obj, old runtime.Object) fielderrors.ValidationErrorList { return validation.ValidateLocalPolicyUpdate(obj.(*authorizationapi.Policy), old.(*authorizationapi.Policy)) } // Matcher returns a generic matcher for a given label and field selector. func Matcher(label labels.Selector, field fields.Selector) generic.Matcher { return &generic.SelectionPredicate{ Label: label, Field: field, GetAttrs: func(obj runtime.Object) (labels.Set, fields.Set, error) { policy, ok := obj.(*authorizationapi.Policy) if !ok { return nil, nil, fmt.Errorf("not a policy") } return labels.Set(policy.ObjectMeta.Labels), SelectableFields(policy), nil }, } } // SelectableFields returns a label set that represents the object func SelectableFields(policy *authorizationapi.Policy) fields.Set { return fields.Set{ "name": policy.Name, } }
apache-2.0
lojies/kubernetes
pkg/kubelet/stats/cri_stats_provider_test.go
44347
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package stats import ( "math/rand" "os" "path/filepath" "runtime" "strings" "testing" "time" gomock "github.com/golang/mock/gomock" cadvisorfs "github.com/google/cadvisor/fs" cadvisorapiv2 "github.com/google/cadvisor/info/v2" "github.com/stretchr/testify/assert" "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/uuid" runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1alpha2" critest "k8s.io/cri-api/pkg/apis/testing" statsapi "k8s.io/kubelet/pkg/apis/stats/v1alpha1" cadvisortest "k8s.io/kubernetes/pkg/kubelet/cadvisor/testing" "k8s.io/kubernetes/pkg/kubelet/cm" kubecontainertest "k8s.io/kubernetes/pkg/kubelet/container/testing" "k8s.io/kubernetes/pkg/kubelet/kuberuntime" "k8s.io/kubernetes/pkg/kubelet/leaky" kubepodtest "k8s.io/kubernetes/pkg/kubelet/pod/testing" serverstats "k8s.io/kubernetes/pkg/kubelet/server/stats" "k8s.io/kubernetes/pkg/volume" ) const ( offsetInodeUsage = iota offsetUsage ) const ( seedRoot = 0 seedKubelet = 200 seedMisc = 300 seedSandbox0 = 1000 seedContainer0 = 2000 seedSandbox1 = 3000 seedContainer1 = 4000 seedContainer2 = 5000 seedSandbox2 = 6000 seedContainer3 = 7000 seedSandbox3 = 8000 ) const ( pName0 = "pod0" pName1 = "pod1" pName2 = "pod2" ) const ( cName0 = "container0-name" cName1 = "container1-name" cName2 = "container2-name" cName3 = "container3-name" cName5 = "container5-name" cName6 = 
"container6-name" cName7 = "container7-name" cName8 = "container8-name" cName9 = "container9-name" ) func TestCRIListPodStats(t *testing.T) { var ( imageFsMountpoint = "/test/mount/point" unknownMountpoint = "/unknown/mount/point" imageFsInfo = getTestFsInfo(2000) rootFsInfo = getTestFsInfo(1000) sandbox0 = makeFakePodSandbox("sandbox0-name", "sandbox0-uid", "sandbox0-ns", false) sandbox0Cgroup = "/" + cm.GetPodCgroupNameSuffix(types.UID(sandbox0.PodSandboxStatus.Metadata.Uid)) container0 = makeFakeContainer(sandbox0, cName0, 0, false) containerStats0 = makeFakeContainerStats(container0, imageFsMountpoint) containerLogStats0 = makeFakeLogStats(1000) container1 = makeFakeContainer(sandbox0, cName1, 0, false) containerStats1 = makeFakeContainerStats(container1, unknownMountpoint) containerLogStats1 = makeFakeLogStats(2000) sandbox1 = makeFakePodSandbox("sandbox1-name", "sandbox1-uid", "sandbox1-ns", false) sandbox1Cgroup = "/" + cm.GetPodCgroupNameSuffix(types.UID(sandbox1.PodSandboxStatus.Metadata.Uid)) container2 = makeFakeContainer(sandbox1, cName2, 0, false) containerStats2 = makeFakeContainerStats(container2, imageFsMountpoint) containerLogStats2 = makeFakeLogStats(3000) sandbox2 = makeFakePodSandbox("sandbox2-name", "sandbox2-uid", "sandbox2-ns", false) sandbox2Cgroup = "/" + cm.GetPodCgroupNameSuffix(types.UID(sandbox2.PodSandboxStatus.Metadata.Uid)) container3 = makeFakeContainer(sandbox2, cName3, 0, true) containerStats3 = makeFakeContainerStats(container3, imageFsMountpoint) container4 = makeFakeContainer(sandbox2, cName3, 1, false) containerStats4 = makeFakeContainerStats(container4, imageFsMountpoint) containerLogStats4 = makeFakeLogStats(4000) // Running pod with a terminated container and a running container sandbox3 = makeFakePodSandbox("sandbox3-name", "sandbox3-uid", "sandbox3-ns", false) sandbox3Cgroup = "/" + cm.GetPodCgroupNameSuffix(types.UID(sandbox3.PodSandboxStatus.Metadata.Uid)) container5 = makeFakeContainer(sandbox3, cName5, 0, true) 
containerStats5 = makeFakeContainerStats(container5, imageFsMountpoint) containerLogStats5 = makeFakeLogStats(5000) container8 = makeFakeContainer(sandbox3, cName8, 0, false) containerStats8 = makeFakeContainerStats(container8, imageFsMountpoint) containerLogStats8 = makeFakeLogStats(6000) // Terminated pod sandbox sandbox4 = makeFakePodSandbox("sandbox1-name", "sandbox1-uid", "sandbox1-ns", true) container6 = makeFakeContainer(sandbox4, cName6, 0, true) containerStats6 = makeFakeContainerStats(container6, imageFsMountpoint) // Terminated pod sandbox5 = makeFakePodSandbox("sandbox1-name", "sandbox5-uid", "sandbox1-ns", true) container7 = makeFakeContainer(sandbox5, cName7, 0, true) containerStats7 = makeFakeContainerStats(container7, imageFsMountpoint) podLogName0 = "pod-log-0" podLogName1 = "pod-log-1" podLogStats0 = makeFakeLogStats(5000) podLogStats1 = makeFakeLogStats(6000) ) var ( mockCadvisor = new(cadvisortest.Mock) mockRuntimeCache = new(kubecontainertest.MockRuntimeCache) mockPodManager = new(kubepodtest.MockManager) resourceAnalyzer = new(fakeResourceAnalyzer) fakeRuntimeService = critest.NewFakeRuntimeService() fakeImageService = critest.NewFakeImageService() ) infos := map[string]cadvisorapiv2.ContainerInfo{ "/": getTestContainerInfo(seedRoot, "", "", ""), "/kubelet": getTestContainerInfo(seedKubelet, "", "", ""), "/system": getTestContainerInfo(seedMisc, "", "", ""), sandbox0.PodSandboxStatus.Id: getTestContainerInfo(seedSandbox0, pName0, sandbox0.PodSandboxStatus.Metadata.Namespace, leaky.PodInfraContainerName), sandbox0Cgroup: getTestContainerInfo(seedSandbox0, "", "", ""), container0.ContainerStatus.Id: getTestContainerInfo(seedContainer0, pName0, sandbox0.PodSandboxStatus.Metadata.Namespace, cName0), container1.ContainerStatus.Id: getTestContainerInfo(seedContainer1, pName0, sandbox0.PodSandboxStatus.Metadata.Namespace, cName1), sandbox1.PodSandboxStatus.Id: getTestContainerInfo(seedSandbox1, pName1, sandbox1.PodSandboxStatus.Metadata.Namespace, 
leaky.PodInfraContainerName), sandbox1Cgroup: getTestContainerInfo(seedSandbox1, "", "", ""), container2.ContainerStatus.Id: getTestContainerInfo(seedContainer2, pName1, sandbox1.PodSandboxStatus.Metadata.Namespace, cName2), sandbox2.PodSandboxStatus.Id: getTestContainerInfo(seedSandbox2, pName2, sandbox2.PodSandboxStatus.Metadata.Namespace, leaky.PodInfraContainerName), sandbox2Cgroup: getTestContainerInfo(seedSandbox2, "", "", ""), container4.ContainerStatus.Id: getTestContainerInfo(seedContainer3, pName2, sandbox2.PodSandboxStatus.Metadata.Namespace, cName3), sandbox3Cgroup: getTestContainerInfo(seedSandbox3, "", "", ""), } options := cadvisorapiv2.RequestOptions{ IdType: cadvisorapiv2.TypeName, Count: 2, Recursive: true, } mockCadvisor. On("ContainerInfoV2", "/", options).Return(infos, nil). On("RootFsInfo").Return(rootFsInfo, nil). On("GetDirFsInfo", imageFsMountpoint).Return(imageFsInfo, nil). On("GetDirFsInfo", unknownMountpoint).Return(cadvisorapiv2.FsInfo{}, cadvisorfs.ErrNoSuchDevice) fakeRuntimeService.SetFakeSandboxes([]*critest.FakePodSandbox{ sandbox0, sandbox1, sandbox2, sandbox3, sandbox4, sandbox5, }) fakeRuntimeService.SetFakeContainers([]*critest.FakeContainer{ container0, container1, container2, container3, container4, container5, container6, container7, container8, }) fakeRuntimeService.SetFakeContainerStats([]*runtimeapi.ContainerStats{ containerStats0, containerStats1, containerStats2, containerStats3, containerStats4, containerStats5, containerStats6, containerStats7, containerStats8, }) ephemeralVolumes := makeFakeVolumeStats([]string{"ephVolume1, ephVolumes2"}) persistentVolumes := makeFakeVolumeStats([]string{"persisVolume1, persisVolumes2"}) resourceAnalyzer.podVolumeStats = serverstats.PodVolumeStats{ EphemeralVolumes: ephemeralVolumes, PersistentVolumes: persistentVolumes, } fakeStats := map[string]*volume.Metrics{ kuberuntime.BuildContainerLogsDirectory("sandbox0-ns", "sandbox0-name", types.UID("sandbox0-uid"), cName0): 
containerLogStats0, kuberuntime.BuildContainerLogsDirectory("sandbox0-ns", "sandbox0-name", types.UID("sandbox0-uid"), cName1): containerLogStats1, kuberuntime.BuildContainerLogsDirectory("sandbox1-ns", "sandbox1-name", types.UID("sandbox1-uid"), cName2): containerLogStats2, kuberuntime.BuildContainerLogsDirectory("sandbox2-ns", "sandbox2-name", types.UID("sandbox2-uid"), cName3): containerLogStats4, kuberuntime.BuildContainerLogsDirectory("sandbox3-ns", "sandbox3-name", types.UID("sandbox3-uid"), cName5): containerLogStats5, kuberuntime.BuildContainerLogsDirectory("sandbox3-ns", "sandbox3-name", types.UID("sandbox3-uid"), cName8): containerLogStats8, filepath.Join(kuberuntime.BuildPodLogsDirectory("sandbox0-ns", "sandbox0-name", types.UID("sandbox0-uid")), podLogName0): podLogStats0, filepath.Join(kuberuntime.BuildPodLogsDirectory("sandbox1-ns", "sandbox1-name", types.UID("sandbox1-uid")), podLogName1): podLogStats1, } ctrl := gomock.NewController(t) defer ctrl.Finish() fakeOS := &kubecontainertest.FakeOS{} fakeOS.ReadDirFn = func(path string) ([]os.FileInfo, error) { var fileInfos []os.FileInfo mockFI := kubecontainertest.NewMockFileInfo(ctrl) switch path { case kuberuntime.BuildPodLogsDirectory("sandbox0-ns", "sandbox0-name", types.UID("sandbox0-uid")): mockFI.EXPECT().Name().Return(podLogName0) case kuberuntime.BuildPodLogsDirectory("sandbox1-ns", "sandbox1-name", types.UID("sandbox1-uid")): mockFI.EXPECT().Name().Return(podLogName1) default: return nil, nil } mockFI.EXPECT().IsDir().Return(false) fileInfos = append(fileInfos, mockFI) return fileInfos, nil } provider := NewCRIStatsProvider( mockCadvisor, resourceAnalyzer, mockPodManager, mockRuntimeCache, fakeRuntimeService, fakeImageService, NewFakeHostStatsProviderWithData(fakeStats, fakeOS), false, ) stats, err := provider.ListPodStats() assert := assert.New(t) assert.NoError(err) assert.Equal(4, len(stats)) podStatsMap := make(map[statsapi.PodReference]statsapi.PodStats) for _, s := range stats { 
podStatsMap[s.PodRef] = s } p0 := podStatsMap[statsapi.PodReference{Name: "sandbox0-name", UID: "sandbox0-uid", Namespace: "sandbox0-ns"}] assert.Equal(sandbox0.CreatedAt, p0.StartTime.UnixNano()) assert.Equal(2, len(p0.Containers)) checkEphemeralStorageStats(assert, p0, ephemeralVolumes, []*runtimeapi.ContainerStats{containerStats0, containerStats1}, []*volume.Metrics{containerLogStats0, containerLogStats1}, podLogStats0) containerStatsMap := make(map[string]statsapi.ContainerStats) for _, s := range p0.Containers { containerStatsMap[s.Name] = s } c0 := containerStatsMap[cName0] assert.Equal(container0.CreatedAt, c0.StartTime.UnixNano()) checkCRICPUAndMemoryStats(assert, c0, infos[container0.ContainerStatus.Id].Stats[0]) checkCRIAcceleratorStats(assert, c0, infos[container0.ContainerStatus.Id].Stats[0]) checkCRIRootfsStats(assert, c0, containerStats0, &imageFsInfo) checkCRILogsStats(assert, c0, &rootFsInfo, containerLogStats0) c1 := containerStatsMap[cName1] assert.Equal(container1.CreatedAt, c1.StartTime.UnixNano()) checkCRICPUAndMemoryStats(assert, c1, infos[container1.ContainerStatus.Id].Stats[0]) checkCRIAcceleratorStats(assert, c1, infos[container1.ContainerStatus.Id].Stats[0]) checkCRIRootfsStats(assert, c1, containerStats1, nil) checkCRILogsStats(assert, c1, &rootFsInfo, containerLogStats1) checkCRINetworkStats(assert, p0.Network, infos[sandbox0.PodSandboxStatus.Id].Stats[0].Network) checkCRIPodCPUAndMemoryStats(assert, p0, infos[sandbox0Cgroup].Stats[0]) p1 := podStatsMap[statsapi.PodReference{Name: "sandbox1-name", UID: "sandbox1-uid", Namespace: "sandbox1-ns"}] assert.Equal(sandbox1.CreatedAt, p1.StartTime.UnixNano()) assert.Equal(1, len(p1.Containers)) checkEphemeralStorageStats(assert, p1, ephemeralVolumes, []*runtimeapi.ContainerStats{containerStats2}, []*volume.Metrics{containerLogStats2}, podLogStats1) c2 := p1.Containers[0] assert.Equal(cName2, c2.Name) assert.Equal(container2.CreatedAt, c2.StartTime.UnixNano()) checkCRICPUAndMemoryStats(assert, 
c2, infos[container2.ContainerStatus.Id].Stats[0]) checkCRIAcceleratorStats(assert, c2, infos[container2.ContainerStatus.Id].Stats[0]) checkCRIRootfsStats(assert, c2, containerStats2, &imageFsInfo) checkCRILogsStats(assert, c2, &rootFsInfo, containerLogStats2) checkCRINetworkStats(assert, p1.Network, infos[sandbox1.PodSandboxStatus.Id].Stats[0].Network) checkCRIPodCPUAndMemoryStats(assert, p1, infos[sandbox1Cgroup].Stats[0]) p2 := podStatsMap[statsapi.PodReference{Name: "sandbox2-name", UID: "sandbox2-uid", Namespace: "sandbox2-ns"}] assert.Equal(sandbox2.CreatedAt, p2.StartTime.UnixNano()) assert.Equal(1, len(p2.Containers)) checkEphemeralStorageStats(assert, p2, ephemeralVolumes, []*runtimeapi.ContainerStats{containerStats4}, []*volume.Metrics{containerLogStats4}, nil) c3 := p2.Containers[0] assert.Equal(cName3, c3.Name) assert.Equal(container4.CreatedAt, c3.StartTime.UnixNano()) checkCRICPUAndMemoryStats(assert, c3, infos[container4.ContainerStatus.Id].Stats[0]) checkCRIAcceleratorStats(assert, c3, infos[container4.ContainerStatus.Id].Stats[0]) checkCRIRootfsStats(assert, c3, containerStats4, &imageFsInfo) checkCRILogsStats(assert, c3, &rootFsInfo, containerLogStats4) checkCRINetworkStats(assert, p2.Network, infos[sandbox2.PodSandboxStatus.Id].Stats[0].Network) checkCRIPodCPUAndMemoryStats(assert, p2, infos[sandbox2Cgroup].Stats[0]) p3 := podStatsMap[statsapi.PodReference{Name: "sandbox3-name", UID: "sandbox3-uid", Namespace: "sandbox3-ns"}] assert.Equal(sandbox3.CreatedAt, p3.StartTime.UnixNano()) assert.Equal(1, len(p3.Containers)) c8 := p3.Containers[0] assert.Equal(cName8, c8.Name) assert.Equal(container8.CreatedAt, c8.StartTime.UnixNano()) assert.NotNil(c8.CPU.Time) assert.NotNil(c8.Memory.Time) checkCRIPodCPUAndMemoryStats(assert, p3, infos[sandbox3Cgroup].Stats[0]) mockCadvisor.AssertExpectations(t) } func TestAcceleratorUsageStatsCanBeDisabled(t *testing.T) { var ( imageFsMountpoint = "/test/mount/point" unknownMountpoint = "/unknown/mount/point" 
imageFsInfo = getTestFsInfo(2000) rootFsInfo = getTestFsInfo(1000) sandbox0 = makeFakePodSandbox("sandbox0-name", "sandbox0-uid", "sandbox0-ns", false) sandbox0Cgroup = "/" + cm.GetPodCgroupNameSuffix(types.UID(sandbox0.PodSandboxStatus.Metadata.Uid)) container0 = makeFakeContainer(sandbox0, cName0, 0, false) containerStats0 = makeFakeContainerStats(container0, imageFsMountpoint) container1 = makeFakeContainer(sandbox0, cName1, 0, false) containerStats1 = makeFakeContainerStats(container1, unknownMountpoint) ) var ( mockCadvisor = new(cadvisortest.Mock) mockRuntimeCache = new(kubecontainertest.MockRuntimeCache) mockPodManager = new(kubepodtest.MockManager) resourceAnalyzer = new(fakeResourceAnalyzer) fakeRuntimeService = critest.NewFakeRuntimeService() fakeImageService = critest.NewFakeImageService() ) infos := map[string]cadvisorapiv2.ContainerInfo{ "/": getTestContainerInfo(seedRoot, "", "", ""), "/kubelet": getTestContainerInfo(seedKubelet, "", "", ""), "/system": getTestContainerInfo(seedMisc, "", "", ""), sandbox0.PodSandboxStatus.Id: getTestContainerInfo(seedSandbox0, pName0, sandbox0.PodSandboxStatus.Metadata.Namespace, leaky.PodInfraContainerName), sandbox0Cgroup: getTestContainerInfo(seedSandbox0, "", "", ""), container0.ContainerStatus.Id: getTestContainerInfo(seedContainer0, pName0, sandbox0.PodSandboxStatus.Metadata.Namespace, cName0), container1.ContainerStatus.Id: getTestContainerInfo(seedContainer1, pName0, sandbox0.PodSandboxStatus.Metadata.Namespace, cName1), } options := cadvisorapiv2.RequestOptions{ IdType: cadvisorapiv2.TypeName, Count: 2, Recursive: true, } mockCadvisor. On("ContainerInfoV2", "/", options).Return(infos, nil). On("RootFsInfo").Return(rootFsInfo, nil). On("GetDirFsInfo", imageFsMountpoint).Return(imageFsInfo, nil). 
On("GetDirFsInfo", unknownMountpoint).Return(cadvisorapiv2.FsInfo{}, cadvisorfs.ErrNoSuchDevice) fakeRuntimeService.SetFakeSandboxes([]*critest.FakePodSandbox{ sandbox0, }) fakeRuntimeService.SetFakeContainers([]*critest.FakeContainer{ container0, container1, }) fakeRuntimeService.SetFakeContainerStats([]*runtimeapi.ContainerStats{ containerStats0, containerStats1, }) ephemeralVolumes := makeFakeVolumeStats([]string{"ephVolume1, ephVolumes2"}) persistentVolumes := makeFakeVolumeStats([]string{"persisVolume1, persisVolumes2"}) resourceAnalyzer.podVolumeStats = serverstats.PodVolumeStats{ EphemeralVolumes: ephemeralVolumes, PersistentVolumes: persistentVolumes, } provider := NewCRIStatsProvider( mockCadvisor, resourceAnalyzer, mockPodManager, mockRuntimeCache, fakeRuntimeService, fakeImageService, NewFakeHostStatsProvider(), true, // this is what the test is actually testing ) stats, err := provider.ListPodStats() assert := assert.New(t) assert.NoError(err) assert.Equal(1, len(stats)) podStatsMap := make(map[statsapi.PodReference]statsapi.PodStats) for _, s := range stats { podStatsMap[s.PodRef] = s } p0 := podStatsMap[statsapi.PodReference{Name: "sandbox0-name", UID: "sandbox0-uid", Namespace: "sandbox0-ns"}] assert.Equal(sandbox0.CreatedAt, p0.StartTime.UnixNano()) assert.Equal(2, len(p0.Containers)) containerStatsMap := make(map[string]statsapi.ContainerStats) for _, s := range p0.Containers { containerStatsMap[s.Name] = s } c0 := containerStatsMap[cName0] assert.Equal(container0.CreatedAt, c0.StartTime.UnixNano()) checkCRICPUAndMemoryStats(assert, c0, infos[container0.ContainerStatus.Id].Stats[0]) assert.Nil(c0.Accelerators) c1 := containerStatsMap[cName1] assert.Equal(container1.CreatedAt, c1.StartTime.UnixNano()) checkCRICPUAndMemoryStats(assert, c1, infos[container1.ContainerStatus.Id].Stats[0]) assert.Nil(c1.Accelerators) checkCRIPodCPUAndMemoryStats(assert, p0, infos[sandbox0Cgroup].Stats[0]) mockCadvisor.AssertExpectations(t) } func 
TestCRIListPodCPUAndMemoryStats(t *testing.T) { var ( imageFsMountpoint = "/test/mount/point" unknownMountpoint = "/unknown/mount/point" sandbox0 = makeFakePodSandbox("sandbox0-name", "sandbox0-uid", "sandbox0-ns", false) sandbox0Cgroup = "/" + cm.GetPodCgroupNameSuffix(types.UID(sandbox0.PodSandboxStatus.Metadata.Uid)) container0 = makeFakeContainer(sandbox0, cName0, 0, false) containerStats0 = makeFakeContainerStats(container0, imageFsMountpoint) container1 = makeFakeContainer(sandbox0, cName1, 0, false) containerStats1 = makeFakeContainerStats(container1, unknownMountpoint) sandbox1 = makeFakePodSandbox("sandbox1-name", "sandbox1-uid", "sandbox1-ns", false) sandbox1Cgroup = "/" + cm.GetPodCgroupNameSuffix(types.UID(sandbox1.PodSandboxStatus.Metadata.Uid)) container2 = makeFakeContainer(sandbox1, cName2, 0, false) containerStats2 = makeFakeContainerStats(container2, imageFsMountpoint) sandbox2 = makeFakePodSandbox("sandbox2-name", "sandbox2-uid", "sandbox2-ns", false) sandbox2Cgroup = "/" + cm.GetPodCgroupNameSuffix(types.UID(sandbox2.PodSandboxStatus.Metadata.Uid)) container3 = makeFakeContainer(sandbox2, cName3, 0, true) containerStats3 = makeFakeContainerStats(container3, imageFsMountpoint) container4 = makeFakeContainer(sandbox2, cName3, 1, false) containerStats4 = makeFakeContainerStats(container4, imageFsMountpoint) // Running pod with a terminated container and a running container sandbox3 = makeFakePodSandbox("sandbox3-name", "sandbox3-uid", "sandbox3-ns", false) sandbox3Cgroup = "/" + cm.GetPodCgroupNameSuffix(types.UID(sandbox3.PodSandboxStatus.Metadata.Uid)) container5 = makeFakeContainer(sandbox3, cName5, 0, true) containerStats5 = makeFakeContainerStats(container5, imageFsMountpoint) container8 = makeFakeContainer(sandbox3, cName8, 0, false) containerStats8 = makeFakeContainerStats(container8, imageFsMountpoint) // Terminated pod sandbox sandbox4 = makeFakePodSandbox("sandbox1-name", "sandbox1-uid", "sandbox1-ns", true) container6 = 
makeFakeContainer(sandbox4, cName6, 0, true) containerStats6 = makeFakeContainerStats(container6, imageFsMountpoint) // Terminated pod sandbox5 = makeFakePodSandbox("sandbox1-name", "sandbox5-uid", "sandbox1-ns", true) container7 = makeFakeContainer(sandbox5, cName7, 0, true) containerStats7 = makeFakeContainerStats(container7, imageFsMountpoint) // A pod that cadvisor returns no stats sandbox6 = makeFakePodSandbox("sandbox6-name", "sandbox6-uid", "sandbox6-ns", false) container9 = makeFakeContainer(sandbox6, cName9, 0, false) containerStats9 = makeFakeContainerStats(container9, imageFsMountpoint) ) var ( mockCadvisor = new(cadvisortest.Mock) mockRuntimeCache = new(kubecontainertest.MockRuntimeCache) mockPodManager = new(kubepodtest.MockManager) resourceAnalyzer = new(fakeResourceAnalyzer) fakeRuntimeService = critest.NewFakeRuntimeService() ) infos := map[string]cadvisorapiv2.ContainerInfo{ "/": getTestContainerInfo(seedRoot, "", "", ""), "/kubelet": getTestContainerInfo(seedKubelet, "", "", ""), "/system": getTestContainerInfo(seedMisc, "", "", ""), sandbox0.PodSandboxStatus.Id: getTestContainerInfo(seedSandbox0, pName0, sandbox0.PodSandboxStatus.Metadata.Namespace, leaky.PodInfraContainerName), sandbox0Cgroup: getTestContainerInfo(seedSandbox0, "", "", ""), container0.ContainerStatus.Id: getTestContainerInfo(seedContainer0, pName0, sandbox0.PodSandboxStatus.Metadata.Namespace, cName0), container1.ContainerStatus.Id: getTestContainerInfo(seedContainer1, pName0, sandbox0.PodSandboxStatus.Metadata.Namespace, cName1), sandbox1.PodSandboxStatus.Id: getTestContainerInfo(seedSandbox1, pName1, sandbox1.PodSandboxStatus.Metadata.Namespace, leaky.PodInfraContainerName), sandbox1Cgroup: getTestContainerInfo(seedSandbox1, "", "", ""), container2.ContainerStatus.Id: getTestContainerInfo(seedContainer2, pName1, sandbox1.PodSandboxStatus.Metadata.Namespace, cName2), sandbox2.PodSandboxStatus.Id: getTestContainerInfo(seedSandbox2, pName2, 
sandbox2.PodSandboxStatus.Metadata.Namespace, leaky.PodInfraContainerName), sandbox2Cgroup: getTestContainerInfo(seedSandbox2, "", "", ""), container4.ContainerStatus.Id: getTestContainerInfo(seedContainer3, pName2, sandbox2.PodSandboxStatus.Metadata.Namespace, cName3), sandbox3Cgroup: getTestContainerInfo(seedSandbox3, "", "", ""), } options := cadvisorapiv2.RequestOptions{ IdType: cadvisorapiv2.TypeName, Count: 2, Recursive: true, } mockCadvisor. On("ContainerInfoV2", "/", options).Return(infos, nil) fakeRuntimeService.SetFakeSandboxes([]*critest.FakePodSandbox{ sandbox0, sandbox1, sandbox2, sandbox3, sandbox4, sandbox5, sandbox6, }) fakeRuntimeService.SetFakeContainers([]*critest.FakeContainer{ container0, container1, container2, container3, container4, container5, container6, container7, container8, container9, }) fakeRuntimeService.SetFakeContainerStats([]*runtimeapi.ContainerStats{ containerStats0, containerStats1, containerStats2, containerStats3, containerStats4, containerStats5, containerStats6, containerStats7, containerStats8, containerStats9, }) ephemeralVolumes := makeFakeVolumeStats([]string{"ephVolume1, ephVolumes2"}) persistentVolumes := makeFakeVolumeStats([]string{"persisVolume1, persisVolumes2"}) resourceAnalyzer.podVolumeStats = serverstats.PodVolumeStats{ EphemeralVolumes: ephemeralVolumes, PersistentVolumes: persistentVolumes, } provider := NewCRIStatsProvider( mockCadvisor, resourceAnalyzer, mockPodManager, mockRuntimeCache, fakeRuntimeService, nil, NewFakeHostStatsProvider(), false, ) stats, err := provider.ListPodCPUAndMemoryStats() assert := assert.New(t) assert.NoError(err) assert.Equal(5, len(stats)) podStatsMap := make(map[statsapi.PodReference]statsapi.PodStats) for _, s := range stats { podStatsMap[s.PodRef] = s } p0 := podStatsMap[statsapi.PodReference{Name: "sandbox0-name", UID: "sandbox0-uid", Namespace: "sandbox0-ns"}] assert.Equal(sandbox0.CreatedAt, p0.StartTime.UnixNano()) assert.Equal(2, len(p0.Containers)) 
assert.Nil(p0.EphemeralStorage) assert.Nil(p0.VolumeStats) assert.Nil(p0.Network) checkCRIPodCPUAndMemoryStats(assert, p0, infos[sandbox0Cgroup].Stats[0]) containerStatsMap := make(map[string]statsapi.ContainerStats) for _, s := range p0.Containers { containerStatsMap[s.Name] = s } c0 := containerStatsMap[cName0] assert.Equal(container0.CreatedAt, c0.StartTime.UnixNano()) checkCRICPUAndMemoryStats(assert, c0, infos[container0.ContainerStatus.Id].Stats[0]) assert.Nil(c0.Rootfs) assert.Nil(c0.Logs) assert.Nil(c0.Accelerators) assert.Nil(c0.UserDefinedMetrics) c1 := containerStatsMap[cName1] assert.Equal(container1.CreatedAt, c1.StartTime.UnixNano()) checkCRICPUAndMemoryStats(assert, c1, infos[container1.ContainerStatus.Id].Stats[0]) assert.Nil(c1.Rootfs) assert.Nil(c1.Logs) assert.Nil(c1.Accelerators) assert.Nil(c1.UserDefinedMetrics) p1 := podStatsMap[statsapi.PodReference{Name: "sandbox1-name", UID: "sandbox1-uid", Namespace: "sandbox1-ns"}] assert.Equal(sandbox1.CreatedAt, p1.StartTime.UnixNano()) assert.Equal(1, len(p1.Containers)) assert.Nil(p1.EphemeralStorage) assert.Nil(p1.VolumeStats) assert.Nil(p1.Network) checkCRIPodCPUAndMemoryStats(assert, p1, infos[sandbox1Cgroup].Stats[0]) c2 := p1.Containers[0] assert.Equal(cName2, c2.Name) assert.Equal(container2.CreatedAt, c2.StartTime.UnixNano()) checkCRICPUAndMemoryStats(assert, c2, infos[container2.ContainerStatus.Id].Stats[0]) assert.Nil(c2.Rootfs) assert.Nil(c2.Logs) assert.Nil(c2.Accelerators) assert.Nil(c2.UserDefinedMetrics) p2 := podStatsMap[statsapi.PodReference{Name: "sandbox2-name", UID: "sandbox2-uid", Namespace: "sandbox2-ns"}] assert.Equal(sandbox2.CreatedAt, p2.StartTime.UnixNano()) assert.Equal(1, len(p2.Containers)) assert.Nil(p2.EphemeralStorage) assert.Nil(p2.VolumeStats) assert.Nil(p2.Network) checkCRIPodCPUAndMemoryStats(assert, p2, infos[sandbox2Cgroup].Stats[0]) c3 := p2.Containers[0] assert.Equal(cName3, c3.Name) assert.Equal(container4.CreatedAt, c3.StartTime.UnixNano()) 
checkCRICPUAndMemoryStats(assert, c3, infos[container4.ContainerStatus.Id].Stats[0]) assert.Nil(c2.Rootfs) assert.Nil(c2.Logs) assert.Nil(c2.Accelerators) assert.Nil(c2.UserDefinedMetrics) p3 := podStatsMap[statsapi.PodReference{Name: "sandbox3-name", UID: "sandbox3-uid", Namespace: "sandbox3-ns"}] assert.Equal(sandbox3.CreatedAt, p3.StartTime.UnixNano()) assert.Equal(1, len(p3.Containers)) c8 := p3.Containers[0] assert.Equal(cName8, c8.Name) assert.Equal(container8.CreatedAt, c8.StartTime.UnixNano()) assert.NotNil(c8.CPU.Time) assert.NotNil(c8.Memory.Time) checkCRIPodCPUAndMemoryStats(assert, p3, infos[sandbox3Cgroup].Stats[0]) p6 := podStatsMap[statsapi.PodReference{Name: "sandbox6-name", UID: "sandbox6-uid", Namespace: "sandbox6-ns"}] assert.Equal(sandbox6.CreatedAt, p6.StartTime.UnixNano()) assert.Equal(1, len(p6.Containers)) c9 := p6.Containers[0] assert.Equal(cName9, c9.Name) assert.Equal(container9.CreatedAt, c9.StartTime.UnixNano()) assert.NotNil(c9.CPU.Time) assert.Equal(containerStats9.Cpu.Timestamp, p6.CPU.Time.UnixNano()) assert.NotNil(c9.Memory.Time) assert.Equal(containerStats9.Memory.Timestamp, p6.Memory.Time.UnixNano()) mockCadvisor.AssertExpectations(t) } func TestCRIImagesFsStats(t *testing.T) { var ( imageFsMountpoint = "/test/mount/point" imageFsInfo = getTestFsInfo(2000) imageFsUsage = makeFakeImageFsUsage(imageFsMountpoint) ) var ( mockCadvisor = new(cadvisortest.Mock) mockRuntimeCache = new(kubecontainertest.MockRuntimeCache) mockPodManager = new(kubepodtest.MockManager) resourceAnalyzer = new(fakeResourceAnalyzer) fakeRuntimeService = critest.NewFakeRuntimeService() fakeImageService = critest.NewFakeImageService() ) mockCadvisor.On("GetDirFsInfo", imageFsMountpoint).Return(imageFsInfo, nil) fakeImageService.SetFakeFilesystemUsage([]*runtimeapi.FilesystemUsage{ imageFsUsage, }) provider := NewCRIStatsProvider( mockCadvisor, resourceAnalyzer, mockPodManager, mockRuntimeCache, fakeRuntimeService, fakeImageService, NewFakeHostStatsProvider(), 
// NOTE(review): the lines below are the tail of an image-fs stats test whose
// beginning lies outside this chunk; they close a call begun earlier, then
// verify ImageFsStats against the fake cadvisor/CRI fixtures field by field.
		false,
	)

	stats, err := provider.ImageFsStats()
	assert := assert.New(t)
	assert.NoError(err)
	// Timestamp and usage come from the CRI image-fs usage fixture; available,
	// capacity and inode totals come from cadvisor's FsInfo fixture.
	assert.Equal(imageFsUsage.Timestamp, stats.Time.UnixNano())
	assert.Equal(imageFsInfo.Available, *stats.AvailableBytes)
	assert.Equal(imageFsInfo.Capacity, *stats.CapacityBytes)
	assert.Equal(imageFsInfo.InodesFree, stats.InodesFree)
	assert.Equal(imageFsInfo.Inodes, stats.Inodes)
	assert.Equal(imageFsUsage.UsedBytes.Value, *stats.UsedBytes)
	assert.Equal(imageFsUsage.InodesUsed.Value, *stats.InodesUsed)
	mockCadvisor.AssertExpectations(t)
}

// makeFakePodSandbox builds a fake CRI pod sandbox with the given identity.
// The sandbox is READY unless terminated is true (then NOTREADY) and gets a
// random, dash-free ID.
func makeFakePodSandbox(name, uid, namespace string, terminated bool) *critest.FakePodSandbox {
	p := &critest.FakePodSandbox{
		PodSandboxStatus: runtimeapi.PodSandboxStatus{
			Metadata: &runtimeapi.PodSandboxMetadata{
				Name:      name,
				Uid:       uid,
				Namespace: namespace,
			},
			State:     runtimeapi.PodSandboxState_SANDBOX_READY,
			CreatedAt: time.Now().UnixNano(),
		},
	}
	if terminated {
		p.PodSandboxStatus.State = runtimeapi.PodSandboxState_SANDBOX_NOTREADY
	}
	p.PodSandboxStatus.Id = strings.ReplaceAll(string(uuid.NewUUID()), "-", "")
	return p
}

// makeFakeContainer builds a fake CRI container inside the given sandbox,
// labeled with the standard kubernetes pod/container labels so the stats
// provider can map it back to its pod. State is EXITED when terminated is
// true, RUNNING otherwise; the ID is random and dash-free.
func makeFakeContainer(sandbox *critest.FakePodSandbox, name string, attempt uint32, terminated bool) *critest.FakeContainer {
	sandboxID := sandbox.PodSandboxStatus.Id
	c := &critest.FakeContainer{
		SandboxID: sandboxID,
		ContainerStatus: runtimeapi.ContainerStatus{
			Metadata:  &runtimeapi.ContainerMetadata{Name: name, Attempt: attempt},
			Image:     &runtimeapi.ImageSpec{},
			ImageRef:  "fake-image-ref",
			CreatedAt: time.Now().UnixNano(),
		},
	}
	c.ContainerStatus.Labels = map[string]string{
		"io.kubernetes.pod.name":       sandbox.Metadata.Name,
		"io.kubernetes.pod.uid":        sandbox.Metadata.Uid,
		"io.kubernetes.pod.namespace":  sandbox.Metadata.Namespace,
		"io.kubernetes.container.name": name,
	}
	if terminated {
		c.ContainerStatus.State = runtimeapi.ContainerState_CONTAINER_EXITED
	} else {
		c.ContainerStatus.State = runtimeapi.ContainerState_CONTAINER_RUNNING
	}
	c.ContainerStatus.Id = strings.ReplaceAll(string(uuid.NewUUID()), "-", "")
	return c
}

// makeFakeContainerStats builds random CRI stats for the container's writable
// layer on the given image filesystem. CPU and memory stats are nil for
// exited containers. Writable-layer values are divided by 100 so that sums of
// several containers cannot overflow uint64 in later assertions.
func makeFakeContainerStats(container *critest.FakeContainer, imageFsMountpoint string) *runtimeapi.ContainerStats {
	containerStats := &runtimeapi.ContainerStats{
		Attributes: &runtimeapi.ContainerAttributes{
			Id:       container.ContainerStatus.Id,
			Metadata: container.ContainerStatus.Metadata,
		},
		WritableLayer: &runtimeapi.FilesystemUsage{
			Timestamp:  time.Now().UnixNano(),
			FsId:       &runtimeapi.FilesystemIdentifier{Mountpoint: imageFsMountpoint},
			UsedBytes:  &runtimeapi.UInt64Value{Value: rand.Uint64() / 100},
			InodesUsed: &runtimeapi.UInt64Value{Value: rand.Uint64() / 100},
		},
	}
	if container.State == runtimeapi.ContainerState_CONTAINER_EXITED {
		containerStats.Cpu = nil
		containerStats.Memory = nil
	} else {
		containerStats.Cpu = &runtimeapi.CpuUsage{
			Timestamp:            time.Now().UnixNano(),
			UsageCoreNanoSeconds: &runtimeapi.UInt64Value{Value: rand.Uint64()},
		}
		containerStats.Memory = &runtimeapi.MemoryUsage{
			Timestamp:       time.Now().UnixNano(),
			WorkingSetBytes: &runtimeapi.UInt64Value{Value: rand.Uint64()},
		}
	}
	return containerStats
}

// makeFakeImageFsUsage builds random CRI filesystem usage for the image
// filesystem mounted at fsMountpoint.
func makeFakeImageFsUsage(fsMountpoint string) *runtimeapi.FilesystemUsage {
	return &runtimeapi.FilesystemUsage{
		Timestamp:  time.Now().UnixNano(),
		FsId:       &runtimeapi.FilesystemIdentifier{Mountpoint: fsMountpoint},
		UsedBytes:  &runtimeapi.UInt64Value{Value: rand.Uint64()},
		InodesUsed: &runtimeapi.UInt64Value{Value: rand.Uint64()},
	}
}

// makeFakeVolumeStats builds one VolumeStats per name. All volumes share the
// same randomly generated filesystem numbers; the usage values are divided by
// 100 so that sums across volumes cannot overflow in later assertions.
func makeFakeVolumeStats(volumeNames []string) []statsapi.VolumeStats {
	volumes := make([]statsapi.VolumeStats, len(volumeNames))
	availableBytes := rand.Uint64()
	capacityBytes := rand.Uint64()
	usedBytes := rand.Uint64() / 100
	inodes := rand.Uint64()
	inodesFree := rand.Uint64()
	inodesUsed := rand.Uint64() / 100
	for i, name := range volumeNames {
		fsStats := statsapi.FsStats{
			Time:           metav1.NewTime(time.Now()),
			AvailableBytes: &availableBytes,
			CapacityBytes:  &capacityBytes,
			UsedBytes:      &usedBytes,
			Inodes:         &inodes,
			InodesFree:     &inodesFree,
			InodesUsed:     &inodesUsed,
		}
		volumes[i] = statsapi.VolumeStats{
			FsStats: fsStats,
			Name:    name,
		}
	}
	return volumes
}

// checkCRICPUAndMemoryStats asserts that a container's CPU and memory stats
// match the cadvisor fixture they were derived from.
func checkCRICPUAndMemoryStats(assert *assert.Assertions, actual statsapi.ContainerStats, cs *cadvisorapiv2.ContainerStats) {
	assert.Equal(cs.Timestamp.UnixNano(), actual.CPU.Time.UnixNano())
	assert.Equal(cs.Cpu.Usage.Total, *actual.CPU.UsageCoreNanoSeconds)
	assert.Equal(cs.CpuInst.Usage.Total, *actual.CPU.UsageNanoCores)
	assert.Equal(cs.Memory.Usage, *actual.Memory.UsageBytes)
	assert.Equal(cs.Memory.WorkingSet, *actual.Memory.WorkingSetBytes)
	assert.Equal(cs.Memory.RSS, *actual.Memory.RSSBytes)
	assert.Equal(cs.Memory.ContainerData.Pgfault, *actual.Memory.PageFaults)
	assert.Equal(cs.Memory.ContainerData.Pgmajfault, *actual.Memory.MajorPageFaults)
}

// checkCRIAcceleratorStats asserts accelerator stats match the cadvisor
// fixture, pairwise by index.
func checkCRIAcceleratorStats(assert *assert.Assertions, actual statsapi.ContainerStats, cs *cadvisorapiv2.ContainerStats) {
	assert.Equal(len(cs.Accelerators), len(actual.Accelerators))
	for i := range cs.Accelerators {
		assert.Equal(cs.Accelerators[i].Make, actual.Accelerators[i].Make)
		assert.Equal(cs.Accelerators[i].Model, actual.Accelerators[i].Model)
		assert.Equal(cs.Accelerators[i].ID, actual.Accelerators[i].ID)
		assert.Equal(cs.Accelerators[i].MemoryTotal, actual.Accelerators[i].MemoryTotal)
		assert.Equal(cs.Accelerators[i].MemoryUsed, actual.Accelerators[i].MemoryUsed)
		assert.Equal(cs.Accelerators[i].DutyCycle, actual.Accelerators[i].DutyCycle)
	}
}

// checkCRIRootfsStats asserts the rootfs (writable layer) stats: timestamp
// and usage come from the CRI container stats; available/capacity/inode
// totals come from the image filesystem info when present, and must be unset
// when imageFsInfo is nil.
func checkCRIRootfsStats(assert *assert.Assertions, actual statsapi.ContainerStats, cs *runtimeapi.ContainerStats, imageFsInfo *cadvisorapiv2.FsInfo) {
	assert.Equal(cs.WritableLayer.Timestamp, actual.Rootfs.Time.UnixNano())
	if imageFsInfo != nil {
		assert.Equal(imageFsInfo.Available, *actual.Rootfs.AvailableBytes)
		assert.Equal(imageFsInfo.Capacity, *actual.Rootfs.CapacityBytes)
		assert.Equal(*imageFsInfo.InodesFree, *actual.Rootfs.InodesFree)
		assert.Equal(*imageFsInfo.Inodes, *actual.Rootfs.Inodes)
	} else {
		assert.Nil(actual.Rootfs.AvailableBytes)
		assert.Nil(actual.Rootfs.CapacityBytes)
		assert.Nil(actual.Rootfs.InodesFree)
		assert.Nil(actual.Rootfs.Inodes)
	}

	assert.Equal(cs.WritableLayer.UsedBytes.Value, *actual.Rootfs.UsedBytes)
	assert.Equal(cs.WritableLayer.InodesUsed.Value, *actual.Rootfs.InodesUsed)
}

// checkCRILogsStats asserts log-directory stats: filesystem totals come from
// the root filesystem info, usage comes from the log metrics fixture.
func checkCRILogsStats(assert *assert.Assertions, actual statsapi.ContainerStats, rootFsInfo *cadvisorapiv2.FsInfo, logStats *volume.Metrics) {
	assert.Equal(rootFsInfo.Timestamp, actual.Logs.Time.Time)
	assert.Equal(rootFsInfo.Available, *actual.Logs.AvailableBytes)
	assert.Equal(rootFsInfo.Capacity, *actual.Logs.CapacityBytes)
	assert.Equal(*rootFsInfo.InodesFree, *actual.Logs.InodesFree)
	assert.Equal(*rootFsInfo.Inodes, *actual.Logs.Inodes)
	assert.Equal(uint64(logStats.Used.Value()), *actual.Logs.UsedBytes)
	assert.Equal(uint64(logStats.InodesUsed.Value()), *actual.Logs.InodesUsed)
}

// checkEphemeralStorageStats asserts that the pod's ephemeral-storage usage
// equals the sum of container writable layers, volumes, per-container log
// usage, and (if present) pod-level log usage.
func checkEphemeralStorageStats(assert *assert.Assertions, actual statsapi.PodStats, volumes []statsapi.VolumeStats, containers []*runtimeapi.ContainerStats, containerLogStats []*volume.Metrics, podLogStats *volume.Metrics) {
	var totalUsed, inodesUsed uint64
	for _, container := range containers {
		totalUsed = totalUsed + container.WritableLayer.UsedBytes.Value
		inodesUsed = inodesUsed + container.WritableLayer.InodesUsed.Value
	}
	for _, volume := range volumes {
		totalUsed = totalUsed + *volume.FsStats.UsedBytes
		inodesUsed = inodesUsed + *volume.FsStats.InodesUsed
	}
	for _, logStats := range containerLogStats {
		totalUsed = totalUsed + uint64(logStats.Used.Value())
		inodesUsed = inodesUsed + uint64(logStats.InodesUsed.Value())
	}
	if podLogStats != nil {
		totalUsed = totalUsed + uint64(podLogStats.Used.Value())
		inodesUsed = inodesUsed + uint64(podLogStats.InodesUsed.Value())
	}
	assert.Equal(int(totalUsed), int(*actual.EphemeralStorage.UsedBytes))
	assert.Equal(int(inodesUsed), int(*actual.EphemeralStorage.InodesUsed))
}

// checkCRINetworkStats asserts pod network stats against the first interface
// of the cadvisor network fixture (only Interfaces[0] is checked).
func checkCRINetworkStats(assert *assert.Assertions, actual *statsapi.NetworkStats, expected *cadvisorapiv2.NetworkStats) {
	assert.Equal(expected.Interfaces[0].RxBytes, *actual.RxBytes)
	assert.Equal(expected.Interfaces[0].RxErrors, *actual.RxErrors)
	assert.Equal(expected.Interfaces[0].TxBytes, *actual.TxBytes)
	assert.Equal(expected.Interfaces[0].TxErrors, *actual.TxErrors)
}

// checkCRIPodCPUAndMemoryStats asserts pod-level CPU and memory stats against
// the cadvisor fixture. It is a no-op on non-linux platforms.
func checkCRIPodCPUAndMemoryStats(assert *assert.Assertions, actual statsapi.PodStats, cs *cadvisorapiv2.ContainerStats) {
	if runtime.GOOS != "linux" {
		return
	}
	assert.Equal(cs.Timestamp.UnixNano(), actual.CPU.Time.UnixNano())
	assert.Equal(cs.Cpu.Usage.Total, *actual.CPU.UsageCoreNanoSeconds)
	assert.Equal(cs.CpuInst.Usage.Total, *actual.CPU.UsageNanoCores)
	assert.Equal(cs.Memory.Usage, *actual.Memory.UsageBytes)
	assert.Equal(cs.Memory.WorkingSet, *actual.Memory.WorkingSetBytes)
	assert.Equal(cs.Memory.RSS, *actual.Memory.RSSBytes)
	assert.Equal(cs.Memory.ContainerData.Pgfault, *actual.Memory.PageFaults)
	assert.Equal(cs.Memory.ContainerData.Pgmajfault, *actual.Memory.MajorPageFaults)
}

// makeFakeLogStats builds deterministic log metrics derived from seed so that
// tests can predict the expected totals.
func makeFakeLogStats(seed int) *volume.Metrics {
	m := &volume.Metrics{}
	m.Used = resource.NewQuantity(int64(seed+offsetUsage), resource.BinarySI)
	m.InodesUsed = resource.NewQuantity(int64(seed+offsetInodeUsage), resource.BinarySI)
	return m
}

// TestGetContainerUsageNanoCores exercises the cached-delta computation of
// per-container CPU usage in nanocores: nil inputs, cold cache, equal cached
// value, normal delta, a delta large enough that naive (delta * 1e9) math
// would overflow uint64, and a counter reset (current < cached) on a live
// container.
func TestGetContainerUsageNanoCores(t *testing.T) {
	var value0 uint64
	var value1 uint64 = 10000000000
	// Test with a large container of 100+ CPUs
	var value2 uint64 = 188427786383

	tests := []struct {
		desc          string
		cpuUsageCache map[string]*cpuUsageRecord
		stats         *runtimeapi.ContainerStats
		expected      *uint64
	}{
		{
			desc:          "should return nil if stats is nil",
			cpuUsageCache: map[string]*cpuUsageRecord{},
		},
		{
			desc:          "should return nil if cpu stats is nil",
			cpuUsageCache: map[string]*cpuUsageRecord{},
			stats: &runtimeapi.ContainerStats{
				Attributes: &runtimeapi.ContainerAttributes{
					Id: "1",
				},
				Cpu: nil,
			},
		},
		{
			desc:          "should return nil if usageCoreNanoSeconds is nil",
			cpuUsageCache: map[string]*cpuUsageRecord{},
			stats: &runtimeapi.ContainerStats{
				Attributes: &runtimeapi.ContainerAttributes{
					Id: "1",
				},
				Cpu: &runtimeapi.CpuUsage{
					Timestamp:            1,
					UsageCoreNanoSeconds: nil,
				},
			},
		},
		{
			desc:          "should return nil if cpu stats is not cached yet",
			cpuUsageCache: map[string]*cpuUsageRecord{},
			stats: &runtimeapi.ContainerStats{
				Attributes: &runtimeapi.ContainerAttributes{
					Id: "1",
				},
				Cpu: &runtimeapi.CpuUsage{
					Timestamp: 1,
					UsageCoreNanoSeconds: &runtimeapi.UInt64Value{
						Value: 10000000000,
					},
				},
			},
		},
		{
			desc: "should return zero value if cached cpu stats is equal to current value",
			stats: &runtimeapi.ContainerStats{
				Attributes: &runtimeapi.ContainerAttributes{
					Id: "1",
				},
				Cpu: &runtimeapi.CpuUsage{
					Timestamp: 1,
					UsageCoreNanoSeconds: &runtimeapi.UInt64Value{
						Value: 10000000000,
					},
				},
			},
			cpuUsageCache: map[string]*cpuUsageRecord{
				"1": {
					stats: &runtimeapi.CpuUsage{
						Timestamp: 0,
						UsageCoreNanoSeconds: &runtimeapi.UInt64Value{
							Value: 10000000000,
						},
					},
				},
			},
			expected: &value0,
		},
		{
			desc: "should return correct value if cached cpu stats is not equal to current value",
			stats: &runtimeapi.ContainerStats{
				Attributes: &runtimeapi.ContainerAttributes{
					Id: "1",
				},
				Cpu: &runtimeapi.CpuUsage{
					// One second after the cached sample: 10s of CPU over 1s
					// elapsed = 10000000000 nanocores (value1).
					Timestamp: int64(time.Second / time.Nanosecond),
					UsageCoreNanoSeconds: &runtimeapi.UInt64Value{
						Value: 20000000000,
					},
				},
			},
			cpuUsageCache: map[string]*cpuUsageRecord{
				"1": {
					stats: &runtimeapi.CpuUsage{
						Timestamp: 0,
						UsageCoreNanoSeconds: &runtimeapi.UInt64Value{
							Value: 10000000000,
						},
					},
				},
			},
			expected: &value1,
		},
		{
			desc: "should return correct value if elapsed UsageCoreNanoSeconds exceeds 18446744073",
			// A delta above ~18.4e9 ns would overflow uint64 if multiplied by
			// 1e9 naively; this pins the overflow-safe code path.
			stats: &runtimeapi.ContainerStats{
				Attributes: &runtimeapi.ContainerAttributes{
					Id: "1",
				},
				Cpu: &runtimeapi.CpuUsage{
					Timestamp: int64(time.Second / time.Nanosecond),
					UsageCoreNanoSeconds: &runtimeapi.UInt64Value{
						Value: 68172016162105,
					},
				},
			},
			cpuUsageCache: map[string]*cpuUsageRecord{
				"1": {
					stats: &runtimeapi.CpuUsage{
						Timestamp: 0,
						UsageCoreNanoSeconds: &runtimeapi.UInt64Value{
							Value: 67983588375722,
						},
					},
				},
			},
			expected: &value2,
		},
		{
			desc: "should return nil if cpuacct is reset to 0 in a live container",
			stats: &runtimeapi.ContainerStats{
				Attributes: &runtimeapi.ContainerAttributes{
					Id: "1",
				},
				Cpu: &runtimeapi.CpuUsage{
					Timestamp: 2,
					UsageCoreNanoSeconds: &runtimeapi.UInt64Value{
						Value: 0,
					},
				},
			},
			cpuUsageCache: map[string]*cpuUsageRecord{
				"1": {
					stats: &runtimeapi.CpuUsage{
						Timestamp: 1,
						UsageCoreNanoSeconds: &runtimeapi.UInt64Value{
							Value: 10000000000,
						},
					},
				},
			},
			expected: nil,
		},
	}

	for _, test := range tests {
		provider := &criStatsProvider{cpuUsageCache: test.cpuUsageCache}

		// Before the update, the cached value should be nil
		cached := provider.getContainerUsageNanoCores(test.stats)
		assert.Nil(t, cached)

		// Update the cache and get the latest value.
		real := provider.getAndUpdateContainerUsageNanoCores(test.stats)
		assert.Equal(t, test.expected, real, test.desc)

		// After the update, the cached value should be up-to-date
		cached = provider.getContainerUsageNanoCores(test.stats)
		assert.Equal(t, test.expected, cached, test.desc)
	}
}

// TestExtractIDFromCgroupPath checks container-ID extraction from the three
// cgroup path layouts seen in practice: plain cgroupfs, systemd with a
// containerd scope, and systemd with a CRI-O scope.
func TestExtractIDFromCgroupPath(t *testing.T) {
	tests := []struct {
		cgroupPath string
		expected   string
	}{
		{
			cgroupPath: "/kubepods/burstable/pod2fc932ce-fdcc-454b-97bd-aadfdeb4c340/9be25294016e2dc0340dd605ce1f57b492039b267a6a618a7ad2a7a58a740f32",
			expected:   "9be25294016e2dc0340dd605ce1f57b492039b267a6a618a7ad2a7a58a740f32",
		},
		{
			cgroupPath: "/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fc932ce_fdcc_454b_97bd_aadfdeb4c340.slice/cri-containerd-aaefb9d8feed2d453b543f6d928cede7a4dbefa6a0ae7c9b990dd234c56e93b9.scope",
			expected:   "aaefb9d8feed2d453b543f6d928cede7a4dbefa6a0ae7c9b990dd234c56e93b9",
		},
		{
			cgroupPath: "/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fc932ce_fdcc_454b_97bd_aadfdeb4c340.slice/cri-o-aaefb9d8feed2d453b543f6d928cede7a4dbefa6a0ae7c9b990dd234c56e93b9.scope",
			expected:   "aaefb9d8feed2d453b543f6d928cede7a4dbefa6a0ae7c9b990dd234c56e93b9",
		},
	}

	for _, test := range tests {
		id := extractIDFromCgroupPath(test.cgroupPath)
		assert.Equal(t, test.expected, id)
	}
}
apache-2.0
tarasane/h2o-3
h2o-py/h2o/transforms/__init__.py
23
__author__ = 'spencer'
apache-2.0
chmouel/gofabric8
vendor/k8s.io/kubernetes/pkg/kubectl/resource/builder_test.go
39947
/*
Copyright 2014 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package resource

import (
	"bytes"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"os"
	"reflect"
	"strings"
	"testing"

	"github.com/ghodss/yaml"

	"k8s.io/kubernetes/pkg/api"
	"k8s.io/kubernetes/pkg/api/meta"
	"k8s.io/kubernetes/pkg/api/resource"
	"k8s.io/kubernetes/pkg/api/testapi"
	apitesting "k8s.io/kubernetes/pkg/api/testing"
	"k8s.io/kubernetes/pkg/api/unversioned"
	"k8s.io/kubernetes/pkg/api/v1"
	"k8s.io/kubernetes/pkg/apimachinery/registered"
	"k8s.io/kubernetes/pkg/client/restclient/fake"
	"k8s.io/kubernetes/pkg/runtime"
	"k8s.io/kubernetes/pkg/runtime/serializer/streaming"
	utilerrors "k8s.io/kubernetes/pkg/util/errors"
	utiltesting "k8s.io/kubernetes/pkg/util/testing"
	"k8s.io/kubernetes/pkg/watch"
	"k8s.io/kubernetes/pkg/watch/versioned"
)

// stringBody wraps a string as a ReadCloser for use as a fake HTTP body.
func stringBody(body string) io.ReadCloser {
	return ioutil.NopCloser(bytes.NewReader([]byte(body)))
}

// watchBody encodes the given watch events into a single streamed body, the
// format a watch endpoint would return.
func watchBody(events ...watch.Event) string {
	buf := &bytes.Buffer{}
	codec := testapi.Default.Codec()
	enc := versioned.NewEncoder(streaming.NewEncoder(buf, codec), codec)
	for _, e := range events {
		enc.Encode(&e)
	}
	return buf.String()
}

// fakeClient returns a ClientMapper whose clients perform no real requests.
func fakeClient() ClientMapper {
	return ClientMapperFunc(func(*meta.RESTMapping) (RESTClient, error) {
		return &fake.RESTClient{}, nil
	})
}

// fakeClientWith returns a ClientMapper whose clients serve canned JSON
// responses keyed by request path (plus "?"+query when present). Any request
// for a path not in data fails the test immediately.
func fakeClientWith(testName string, t *testing.T, data map[string]string) ClientMapper {
	return ClientMapperFunc(func(*meta.RESTMapping) (RESTClient, error) {
		return &fake.RESTClient{
			NegotiatedSerializer: testapi.Default.NegotiatedSerializer(),
			Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) {
				p := req.URL.Path
				q := req.URL.RawQuery
				if len(q) != 0 {
					p = p + "?" + q
				}
				body, ok := data[p]
				if !ok {
					t.Fatalf("%s: unexpected request: %s (%s)\n%#v", testName, p, req.URL, req)
				}
				header := http.Header{}
				header.Set("Content-Type", runtime.ContentTypeJSON)
				return &http.Response{
					StatusCode: http.StatusOK,
					Header:     header,
					Body:       stringBody(body),
				}, nil
			}),
		}, nil
	})
}

// testData returns a two-pod PodList and a one-service ServiceList used as
// shared fixtures throughout these tests.
func testData() (*api.PodList, *api.ServiceList) {
	pods := &api.PodList{
		ListMeta: unversioned.ListMeta{
			ResourceVersion: "15",
		},
		Items: []api.Pod{
			{
				ObjectMeta: api.ObjectMeta{Name: "foo", Namespace: "test", ResourceVersion: "10"},
				Spec:       apitesting.DeepEqualSafePodSpec(),
			},
			{
				ObjectMeta: api.ObjectMeta{Name: "bar", Namespace: "test", ResourceVersion: "11"},
				Spec:       apitesting.DeepEqualSafePodSpec(),
			},
		},
	}
	svc := &api.ServiceList{
		ListMeta: unversioned.ListMeta{
			ResourceVersion: "16",
		},
		Items: []api.Service{
			{
				ObjectMeta: api.ObjectMeta{Name: "baz", Namespace: "test", ResourceVersion: "12"},
				Spec: api.ServiceSpec{
					Type:            "ClusterIP",
					SessionAffinity: "None",
				},
			},
		},
	}
	return pods, svc
}

// streamTestData returns a pipe that yields the testData fixtures encoded as
// two back-to-back JSON documents, plus the fixtures themselves.
func streamTestData() (io.Reader, *api.PodList, *api.ServiceList) {
	pods, svc := testData()
	r, w := io.Pipe()
	go func() {
		defer w.Close()
		w.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), pods)))
		w.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), svc)))
	}()
	return r, pods, svc
}

// JSONToYAMLOrDie converts JSON to YAML, panicking on failure (test helper).
func JSONToYAMLOrDie(in []byte) []byte {
	data, err := yaml.JSONToYAML(in)
	if err != nil {
		panic(err)
	}
	return data
}

// streamYAMLTestData is streamTestData but encoded as two YAML documents
// separated by a "---" document marker.
func streamYAMLTestData() (io.Reader, *api.PodList, *api.ServiceList) {
	pods, svc := testData()
	r, w := io.Pipe()
	go func() {
		defer w.Close()
		w.Write(JSONToYAMLOrDie([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), pods))))
		w.Write([]byte("\n---\n"))
		w.Write(JSONToYAMLOrDie([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), svc))))
	}()
	return r, pods, svc
}

// streamTestObject returns a pipe yielding the single encoded object.
func streamTestObject(obj runtime.Object) io.Reader {
	r, w := io.Pipe()
	go func() {
		defer w.Close()
		w.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), obj)))
	}()
	return r
}

// testVisitor records every Info it visits and can inject an error after each
// visit via InjectErr.
type testVisitor struct {
	InjectErr error
	Infos     []*Info
}

// Handle appends the visited info (unless err is set) and returns InjectErr.
func (v *testVisitor) Handle(info *Info, err error) error {
	if err != nil {
		return err
	}
	v.Infos = append(v.Infos, info)
	return v.InjectErr
}

// Objects returns the runtime objects of all visited infos, in visit order.
func (v *testVisitor) Objects() []runtime.Object {
	objects := []runtime.Object{}
	for i := range v.Infos {
		objects = append(objects, v.Infos[i].Object)
	}
	return objects
}

// aPod is a JSON pod template; "{id}" is substituted by callers to produce
// uniquely named fixtures.
var aPod string = `
{
    "kind": "Pod",
    "apiVersion": "` + registered.GroupOrDie(api.GroupName).GroupVersion.String() + `",
    "metadata": {
        "name": "busybox{id}",
        "labels": {
            "name": "busybox{id}"
        }
    },
    "spec": {
        "containers": [
            {
                "name": "busybox",
                "image": "busybox",
                "command": [
                    "sleep",
                    "3600"
                ],
                "imagePullPolicy": "IfNotPresent"
            }
        ],
        "restartPolicy": "Always"
    }
}
`

// aRC is a JSON replication-controller template; "{id}" is substituted by
// callers to produce uniquely named fixtures.
var aRC string = `
{
    "kind": "ReplicationController",
    "apiVersion": "` + registered.GroupOrDie(api.GroupName).GroupVersion.String() + `",
    "metadata": {
        "name": "busybox{id}",
        "labels": {
            "app": "busybox"
        }
    },
    "spec": {
        "replicas": 1,
        "template": {
            "metadata": {
                "name": "busybox{id}",
                "labels": {
                    "app": "busybox{id}"
                }
            },
            "spec": {
                "containers": [
                    {
                        "name": "busybox",
                        "image": "busybox",
                        "command": [
                            "sleep",
                            "3600"
                        ],
                        "imagePullPolicy": "IfNotPresent"
                    }
                ],
                "restartPolicy": "Always"
            }
        }
    }
}
`

// TestPathBuilderAndVersionedObjectNotDefaulted loads a versioned RC from a
// fixture file and verifies the versioned object is decoded WITHOUT API
// defaulting applied (Spec.Replicas stays nil).
func TestPathBuilderAndVersionedObjectNotDefaulted(t *testing.T) {
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{"../../../test/fixtures/pkg/kubectl/builder/kitten-rc.yaml"}})

	test := &testVisitor{}
	singleItemImplied := false

	err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle)
	if err != nil || !singleItemImplied || len(test.Infos) != 1 {
		t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos)
	}

	info := test.Infos[0]
	if info.Name != "update-demo-kitten" || info.Namespace != "" || info.Object == nil {
		t.Errorf("unexpected info: %#v", info)
	}
	version, ok := info.VersionedObject.(*v1.ReplicationController)
	// versioned object does not have defaulting applied
	if info.VersionedObject == nil || !ok || version.Spec.Replicas != nil {
		t.Errorf("unexpected versioned object: %#v", info.VersionedObject)
	}
}

// TestNodeBuilder streams a (cluster-scoped) Node and verifies the builder
// drops the bogus namespace set on the fixture.
func TestNodeBuilder(t *testing.T) {
	node := &api.Node{
		ObjectMeta: api.ObjectMeta{Name: "node1", Namespace: "should-not-have", ResourceVersion: "10"},
		Spec:       api.NodeSpec{},
		Status: api.NodeStatus{
			Capacity: api.ResourceList{
				api.ResourceCPU:    resource.MustParse("1000m"),
				api.ResourceMemory: resource.MustParse("1Mi"),
			},
		},
	}
	r, w := io.Pipe()
	go func() {
		defer w.Close()
		w.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), node)))
	}()

	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		NamespaceParam("test").Stream(r, "STDIN")

	test := &testVisitor{}

	err := b.Do().Visit(test.Handle)
	if err != nil || len(test.Infos) != 1 {
		t.Fatalf("unexpected response: %v %#v", err, test.Infos)
	}
	info := test.Infos[0]
	if info.Name != "node1" || info.Namespace != "" || info.Object == nil {
		t.Errorf("unexpected info: %#v", info)
	}
}

// createTestDir creates path (and parents), failing the test on error.
func createTestDir(t *testing.T, path string) {
	if err := os.MkdirAll(path, 0750); err != nil {
		t.Fatalf("error creating test dir: %v", err)
	}
}

// writeTestFile writes contents to path, failing the test on error.
func writeTestFile(t *testing.T, path string, contents string) {
	if err := ioutil.WriteFile(path, []byte(contents), 0644); err != nil {
		t.Fatalf("error creating test file %#v", err)
	}
}

// TestPathBuilderWithMultiple exercises FilenameParam over directories,
// recursive trees, single files, and hardlinks, checking the resulting infos
// by name and type.
func TestPathBuilderWithMultiple(t *testing.T) {
	// create test dirs
	tmpDir, err := utiltesting.MkTmpdir("recursive_test_multiple")
	if err != nil {
		t.Fatalf("error creating temp dir: %v", err)
	}
	createTestDir(t, fmt.Sprintf("%s/%s", tmpDir, "recursive/pod/pod_1"))
	createTestDir(t, fmt.Sprintf("%s/%s", tmpDir, "recursive/rc/rc_1"))
	createTestDir(t, fmt.Sprintf("%s/%s", tmpDir, "inode/hardlink"))
	defer os.RemoveAll(tmpDir)

	// create test files
	writeTestFile(t, fmt.Sprintf("%s/recursive/pod/busybox.json", tmpDir), strings.Replace(aPod, "{id}", "0", -1))
	writeTestFile(t, fmt.Sprintf("%s/recursive/pod/pod_1/busybox.json", tmpDir), strings.Replace(aPod, "{id}", "1", -1))
	writeTestFile(t, fmt.Sprintf("%s/recursive/rc/busybox.json", tmpDir), strings.Replace(aRC, "{id}", "0", -1))
	writeTestFile(t, fmt.Sprintf("%s/recursive/rc/rc_1/busybox.json", tmpDir), strings.Replace(aRC, "{id}", "1", -1))
	writeTestFile(t, fmt.Sprintf("%s/inode/hardlink/busybox.json", tmpDir), strings.Replace(aPod, "{id}", "0", -1))
	if err := os.Link(fmt.Sprintf("%s/inode/hardlink/busybox.json", tmpDir), fmt.Sprintf("%s/inode/hardlink/busybox-link.json", tmpDir)); err != nil {
		t.Fatalf("error creating test file: %v", err)
	}

	tests := []struct {
		name          string
		object        runtime.Object
		recursive     bool
		directory     string
		expectedNames []string
	}{
		{"pod", &api.Pod{}, false, "../../../examples/pod", []string{"nginx"}},
		{"recursive-pod", &api.Pod{}, true, fmt.Sprintf("%s/recursive/pod", tmpDir), []string{"busybox0", "busybox1"}},
		{"rc", &api.ReplicationController{}, false, "../../../examples/guestbook/legacy/redis-master-controller.yaml", []string{"redis-master"}},
		{"recursive-rc", &api.ReplicationController{}, true, fmt.Sprintf("%s/recursive/rc", tmpDir), []string{"busybox0", "busybox1"}},
		{"hardlink", &api.Pod{}, false, fmt.Sprintf("%s/inode/hardlink/busybox-link.json", tmpDir), []string{"busybox0"}},
		{"hardlink", &api.Pod{}, true, fmt.Sprintf("%s/inode/hardlink/busybox-link.json", tmpDir), []string{"busybox0"}},
	}

	for _, test := range tests {
		b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
			FilenameParam(false, &FilenameOptions{Recursive: test.recursive, Filenames: []string{test.directory}}).
			NamespaceParam("test").DefaultNamespace()

		testVisitor := &testVisitor{}
		singleItemImplied := false

		err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(testVisitor.Handle)
		if err != nil {
			t.Fatalf("unexpected response: %v %t %#v %s", err, singleItemImplied, testVisitor.Infos, test.name)
		}

		info := testVisitor.Infos

		for i, v := range info {
			switch test.object.(type) {
			case *api.Pod:
				if _, ok := v.Object.(*api.Pod); !ok || v.Name != test.expectedNames[i] || v.Namespace != "test" {
					t.Errorf("unexpected info: %#v", v)
				}
			case *api.ReplicationController:
				if _, ok := v.Object.(*api.ReplicationController); !ok || v.Name != test.expectedNames[i] || v.Namespace != "test" {
					t.Errorf("unexpected info: %#v", v)
				}
			}
		}
	}
}

// TestPathBuilderWithMultipleInvalid verifies the builder rejects symlinked
// directories and self-referential symlink loops, recursive or not.
func TestPathBuilderWithMultipleInvalid(t *testing.T) {
	// create test dirs
	tmpDir, err := utiltesting.MkTmpdir("recursive_test_multiple_invalid")
	if err != nil {
		t.Fatalf("error creating temp dir: %v", err)
	}
	createTestDir(t, fmt.Sprintf("%s/%s", tmpDir, "inode/symlink/pod"))
	defer os.RemoveAll(tmpDir)

	// create test files
	writeTestFile(t, fmt.Sprintf("%s/inode/symlink/pod/busybox.json", tmpDir), strings.Replace(aPod, "{id}", "0", -1))
	if err := os.Symlink(fmt.Sprintf("%s/inode/symlink/pod", tmpDir), fmt.Sprintf("%s/inode/symlink/pod-link", tmpDir)); err != nil {
		t.Fatalf("error creating test file: %v", err)
	}
	if err := os.Symlink(fmt.Sprintf("%s/inode/symlink/loop", tmpDir), fmt.Sprintf("%s/inode/symlink/loop", tmpDir)); err != nil {
		t.Fatalf("error creating test file: %v", err)
	}

	tests := []struct {
		name      string
		recursive bool
		directory string
	}{
		{"symlink", false, fmt.Sprintf("%s/inode/symlink/pod-link", tmpDir)},
		{"symlink", true, fmt.Sprintf("%s/inode/symlink/pod-link", tmpDir)},
		{"loop", false, fmt.Sprintf("%s/inode/symlink/loop", tmpDir)},
		{"loop", true, fmt.Sprintf("%s/inode/symlink/loop", tmpDir)},
	}

	for _, test := range tests {
		b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
			FilenameParam(false, &FilenameOptions{Recursive: test.recursive, Filenames: []string{test.directory}}).
			NamespaceParam("test").DefaultNamespace()

		testVisitor := &testVisitor{}
		singleItemImplied := false

		err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(testVisitor.Handle)
		if err == nil {
			t.Fatalf("unexpected response: %v %t %#v %s", err, singleItemImplied, testVisitor.Infos, test.name)
		}
	}
}

// TestDirectoryBuilder loads a whole example directory and expects at least
// three infos, including the redis-master controller.
func TestDirectoryBuilder(t *testing.T) {
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{"../../../examples/guestbook/legacy"}}).
		NamespaceParam("test").DefaultNamespace()

	test := &testVisitor{}
	singleItemImplied := false

	err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle)
	if err != nil || singleItemImplied || len(test.Infos) < 3 {
		t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos)
	}

	found := false
	for _, info := range test.Infos {
		if info.Name == "redis-master" && info.Namespace == "test" && info.Object != nil {
			found = true
			break
		}
	}
	if !found {
		t.Errorf("unexpected responses: %#v", test.Infos)
	}
}

// TestNamespaceOverride verifies that an object whose namespace differs from
// NamespaceParam is accepted when enforcement is off (first FilenameParam arg
// false) and rejected when enforcement is on (true).
func TestNamespaceOverride(t *testing.T) {
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), &api.Pod{ObjectMeta: api.ObjectMeta{Namespace: "foo", Name: "test"}})))
	}))
	defer s.Close()

	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{s.URL}}).
		NamespaceParam("test")

	test := &testVisitor{}

	err := b.Do().Visit(test.Handle)
	if err != nil || len(test.Infos) != 1 && test.Infos[0].Namespace != "foo" {
		t.Fatalf("unexpected response: %v %#v", err, test.Infos)
	}

	b = NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		FilenameParam(true, &FilenameOptions{Recursive: false, Filenames: []string{s.URL}}).
		NamespaceParam("test")

	test = &testVisitor{}

	err = b.Do().Visit(test.Handle)
	if err == nil {
		t.Fatalf("expected namespace error. got: %#v", test.Infos)
	}
}

// TestURLBuilder fetches two concatenated pods from an HTTP URL and verifies
// both are visited with their own namespace preserved.
func TestURLBuilder(t *testing.T) {
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), &api.Pod{ObjectMeta: api.ObjectMeta{Namespace: "foo", Name: "test"}})))
		w.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), &api.Pod{ObjectMeta: api.ObjectMeta{Namespace: "foo", Name: "test1"}})))
	}))
	defer s.Close()

	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{s.URL}}).
		NamespaceParam("foo")

	test := &testVisitor{}

	err := b.Do().Visit(test.Handle)
	if err != nil || len(test.Infos) != 2 {
		t.Fatalf("unexpected response: %v %#v", err, test.Infos)
	}
	info := test.Infos[0]
	if info.Name != "test" || info.Namespace != "foo" || info.Object == nil {
		t.Errorf("unexpected info: %#v", info)
	}

	info = test.Infos[1]
	if info.Name != "test1" || info.Namespace != "foo" || info.Object == nil {
		t.Errorf("unexpected info: %#v", info)
	}
}

// TestURLBuilderRequireNamespace verifies RequireNamespace rejects a URL
// object whose namespace conflicts with the requested one.
func TestURLBuilderRequireNamespace(t *testing.T) {
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), &api.Pod{ObjectMeta: api.ObjectMeta{Namespace: "foo", Name: "test"}})))
	}))
	defer s.Close()

	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{s.URL}}).
		NamespaceParam("test").RequireNamespace()

	test := &testVisitor{}
	singleItemImplied := false

	err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle)
	if err == nil || !singleItemImplied || len(test.Infos) != 0 {
		t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos)
	}
}

// TestResourceByName fetches a single named pod and checks the object and the
// resulting resource mapping.
func TestResourceByName(t *testing.T) {
	pods, _ := testData()
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{
		"/namespaces/test/pods/foo": runtime.EncodeOrDie(testapi.Default.Codec(), &pods.Items[0]),
	}), testapi.Default.Codec()).
		NamespaceParam("test")

	test := &testVisitor{}
	singleItemImplied := false

	// No resource/name args yet: Do() must error.
	if b.Do().Err() == nil {
		t.Errorf("unexpected non-error")
	}

	b.ResourceTypeOrNameArgs(true, "pods", "foo")

	err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle)
	if err != nil || !singleItemImplied || len(test.Infos) != 1 {
		t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos)
	}
	if !reflect.DeepEqual(&pods.Items[0], test.Objects()[0]) {
		t.Errorf("unexpected object: %#v", test.Objects()[0])
	}

	mapping, err := b.Do().ResourceMapping()
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if mapping.Resource != "pods" {
		t.Errorf("unexpected resource mapping: %#v", mapping)
	}
}

// TestMultipleResourceByTheSameName requests the same names across two
// resource types (pods,services) and expects four infos and no single
// resource mapping.
func TestMultipleResourceByTheSameName(t *testing.T) {
	pods, svcs := testData()
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{
		"/namespaces/test/pods/foo":     runtime.EncodeOrDie(testapi.Default.Codec(), &pods.Items[0]),
		"/namespaces/test/pods/baz":     runtime.EncodeOrDie(testapi.Default.Codec(), &pods.Items[1]),
		"/namespaces/test/services/foo": runtime.EncodeOrDie(testapi.Default.Codec(), &svcs.Items[0]),
		"/namespaces/test/services/baz": runtime.EncodeOrDie(testapi.Default.Codec(), &svcs.Items[0]),
	}), testapi.Default.Codec()).
		NamespaceParam("test")

	test := &testVisitor{}
	singleItemImplied := false

	if b.Do().Err() == nil {
		t.Errorf("unexpected non-error")
	}

	b.ResourceTypeOrNameArgs(true, "pods,services", "foo", "baz")

	err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle)
	if err != nil || singleItemImplied || len(test.Infos) != 4 {
		t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos)
	}
	if !api.Semantic.DeepDerivative([]runtime.Object{&pods.Items[0], &pods.Items[1], &svcs.Items[0], &svcs.Items[0]}, test.Objects()) {
		t.Errorf("unexpected visited objects: %#v", test.Objects())
	}

	// Mixed resource types: no single mapping exists.
	if _, err := b.Do().ResourceMapping(); err == nil {
		t.Errorf("unexpected non-error")
	}
}

// TestResourceNames exercises the RESOURCE/NAME tuple form of ResourceNames.
func TestResourceNames(t *testing.T) {
	pods, svc := testData()
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{
		"/namespaces/test/pods/foo":     runtime.EncodeOrDie(testapi.Default.Codec(), &pods.Items[0]),
		"/namespaces/test/services/baz": runtime.EncodeOrDie(testapi.Default.Codec(), &svc.Items[0]),
	}), testapi.Default.Codec()).
		NamespaceParam("test")

	test := &testVisitor{}

	if b.Do().Err() == nil {
		t.Errorf("unexpected non-error")
	}

	b.ResourceNames("pods", "foo", "services/baz")

	err := b.Do().Visit(test.Handle)
	if err != nil || len(test.Infos) != 2 {
		t.Fatalf("unexpected response: %v %#v", err, test.Infos)
	}
	if !reflect.DeepEqual(&pods.Items[0], test.Objects()[0]) {
		t.Errorf("unexpected object: \n%#v, expected: \n%#v", test.Objects()[0], &pods.Items[0])
	}
	if !reflect.DeepEqual(&svc.Items[0], test.Objects()[1]) {
		t.Errorf("unexpected object: \n%#v, expected: \n%#v", test.Objects()[1], &svc.Items[0])
	}
}

// TestResourceNamesWithoutResource verifies that a bare name (no default
// resource type) is rejected with a "must be RESOURCE/NAME" error.
func TestResourceNamesWithoutResource(t *testing.T) {
	pods, svc := testData()
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{
		"/namespaces/test/pods/foo":     runtime.EncodeOrDie(testapi.Default.Codec(), &pods.Items[0]),
		"/namespaces/test/services/baz": runtime.EncodeOrDie(testapi.Default.Codec(), &svc.Items[0]),
	}), testapi.Default.Codec()).
		NamespaceParam("test")

	test := &testVisitor{}

	if b.Do().Err() == nil {
		t.Errorf("unexpected non-error")
	}

	b.ResourceNames("", "foo", "services/baz")

	err := b.Do().Visit(test.Handle)
	if err == nil || !strings.Contains(err.Error(), "must be RESOURCE/NAME") {
		t.Fatalf("unexpected response: %v", err)
	}
}

// TestResourceByNameWithoutRequireObject checks that RequireObject(false)
// yields an info with the name/mapping but a nil Object (no fetch happens —
// note the fake client has no canned responses).
func TestResourceByNameWithoutRequireObject(t *testing.T) {
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{}), testapi.Default.Codec()).
		NamespaceParam("test")

	test := &testVisitor{}
	singleItemImplied := false

	if b.Do().Err() == nil {
		t.Errorf("unexpected non-error")
	}

	b.ResourceTypeOrNameArgs(true, "pods", "foo").RequireObject(false)

	err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle)
	if err != nil || !singleItemImplied || len(test.Infos) != 1 {
		t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos)
	}
	if test.Infos[0].Name != "foo" {
		t.Errorf("unexpected name: %#v", test.Infos[0].Name)
	}
	if test.Infos[0].Object != nil {
		t.Errorf("unexpected object: %#v", test.Infos[0].Object)
	}

	mapping, err := b.Do().ResourceMapping()
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if mapping.GroupVersionKind.Kind != "Pod" || mapping.Resource != "pods" {
		t.Errorf("unexpected resource mapping: %#v", mapping)
	}
}

// TestResourceByNameAndEmptySelector verifies an empty selector does not
// interfere with a by-name lookup.
func TestResourceByNameAndEmptySelector(t *testing.T) {
	pods, _ := testData()
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{
		"/namespaces/test/pods/foo": runtime.EncodeOrDie(testapi.Default.Codec(), &pods.Items[0]),
	}), testapi.Default.Codec()).
		NamespaceParam("test").
		SelectorParam("").
		ResourceTypeOrNameArgs(true, "pods", "foo")

	singleItemImplied := false
	infos, err := b.Do().IntoSingleItemImplied(&singleItemImplied).Infos()
	if err != nil || !singleItemImplied || len(infos) != 1 {
		t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, infos)
	}
	if !reflect.DeepEqual(&pods.Items[0], infos[0].Object) {
		t.Errorf("unexpected object: %#v", infos[0])
	}

	mapping, err := b.Do().ResourceMapping()
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if mapping.Resource != "pods" {
		t.Errorf("unexpected resource mapping: %#v", mapping)
	}
}

// TestSelector lists pods and services by label selector ("a=b"), expecting
// the flattened union of both lists and no single resource mapping.
func TestSelector(t *testing.T) {
	pods, svc := testData()
	labelKey := unversioned.LabelSelectorQueryParam(registered.GroupOrDie(api.GroupName).GroupVersion.String())
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{
		"/namespaces/test/pods?" + labelKey + "=a%3Db":     runtime.EncodeOrDie(testapi.Default.Codec(), pods),
		"/namespaces/test/services?" + labelKey + "=a%3Db": runtime.EncodeOrDie(testapi.Default.Codec(), svc),
	}), testapi.Default.Codec()).
		SelectorParam("a=b").
		NamespaceParam("test").
		Flatten()

	test := &testVisitor{}
	singleItemImplied := false

	if b.Do().Err() == nil {
		t.Errorf("unexpected non-error")
	}

	b.ResourceTypeOrNameArgs(true, "pods,service")

	err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle)
	if err != nil || singleItemImplied || len(test.Infos) != 3 {
		t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos)
	}
	if !api.Semantic.DeepDerivative([]runtime.Object{&pods.Items[0], &pods.Items[1], &svc.Items[0]}, test.Objects()) {
		t.Errorf("unexpected visited objects: %#v", test.Objects())
	}

	if _, err := b.Do().ResourceMapping(); err == nil {
		t.Errorf("unexpected non-error")
	}
}

// TestSelectorRequiresKnownTypes verifies a selector with an unknown resource
// type is an error.
func TestSelectorRequiresKnownTypes(t *testing.T) {
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		SelectorParam("a=b").
		NamespaceParam("test").
		ResourceTypes("unknown")

	if b.Do().Err() == nil {
		t.Errorf("unexpected non-error")
	}
}

// TestSingleResourceType verifies SingleResourceType rejects multiple
// resource types in the args.
func TestSingleResourceType(t *testing.T) {
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		SelectorParam("a=b").
		SingleResourceType().
		ResourceTypeOrNameArgs(true, "pods,services")

	if b.Do().Err() == nil {
		t.Errorf("unexpected non-error")
	}
}

// TestResourceTuple table-tests RESOURCE/NAME argument parsing: valid tuples,
// namespaced vs cluster-scoped mixes, and malformed forms (mixed args, commas,
// multiple types per tuple, unknown types, leading/trailing slashes). Each
// case runs with RequireObject both on and off.
func TestResourceTuple(t *testing.T) {
	expectNoErr := func(err error) bool { return err == nil }
	expectErr := func(err error) bool { return err != nil }
	testCases := map[string]struct {
		args  []string
		errFn func(error) bool
	}{
		"valid": {
			args:  []string{"pods/foo"},
			errFn: expectNoErr,
		},
		"valid multiple with name indirection": {
			args:  []string{"pods/foo", "pod/bar"},
			errFn: expectNoErr,
		},
		"valid multiple with namespaced and non-namespaced types": {
			args:  []string{"nodes/foo", "pod/bar"},
			errFn: expectNoErr,
		},
		"mixed arg types": {
			args:  []string{"pods/foo", "bar"},
			errFn: expectErr,
		},
		/*"missing resource": {
			args:  []string{"pods/foo2"},
			errFn: expectNoErr, // not an error because resources are lazily visited
		},*/
		"comma in resource": {
			args:  []string{",pods/foo"},
			errFn: expectErr,
		},
		"multiple types in resource": {
			args:  []string{"pods,services/foo"},
			errFn: expectErr,
		},
		"unknown resource type": {
			args:  []string{"unknown/foo"},
			errFn: expectErr,
		},
		"leading slash": {
			args:  []string{"/bar"},
			errFn: expectErr,
		},
		"trailing slash": {
			args:  []string{"bar/"},
			errFn: expectErr,
		},
	}
	for k, testCase := range testCases {
		for _, requireObject := range []bool{true, false} {
			expectedRequests := map[string]string{}
			if requireObject {
				pods, _ := testData()
				expectedRequests = map[string]string{
					"/namespaces/test/pods/foo": runtime.EncodeOrDie(testapi.Default.Codec(), &pods.Items[0]),
					"/namespaces/test/pods/bar": runtime.EncodeOrDie(testapi.Default.Codec(), &pods.Items[0]),
					"/nodes/foo":                runtime.EncodeOrDie(testapi.Default.Codec(), &api.Node{ObjectMeta: api.ObjectMeta{Name: "foo"}}),
				}
			}

			b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith(k, t, expectedRequests), testapi.Default.Codec()).
				NamespaceParam("test").DefaultNamespace().
				ResourceTypeOrNameArgs(true, testCase.args...).RequireObject(requireObject)

			r := b.Do()

			if !testCase.errFn(r.Err()) {
				t.Errorf("%s: unexpected error: %v", k, r.Err())
			}
			if r.Err() != nil {
				continue
			}
			// Exactly one argument implies a single item, and vice versa.
			switch {
			case (r.singleItemImplied && len(testCase.args) != 1),
				(!r.singleItemImplied && len(testCase.args) == 1):
				t.Errorf("%s: result had unexpected singleItemImplied value", k)
			}
			info, err := r.Infos()
			if err != nil {
				// test error
				continue
			}
			if len(info) != len(testCase.args) {
				t.Errorf("%s: unexpected number of infos returned: %#v", k, info)
			}
		}
	}
}

// TestStream decodes a stream of two JSON documents (pods list + service
// list) and expects three flattened infos.
func TestStream(t *testing.T) {
	r, pods, rc := streamTestData()
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
		NamespaceParam("test").Stream(r, "STDIN").Flatten()

	test := &testVisitor{}
	singleItemImplied := false

	err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle)
	if err != nil || singleItemImplied || len(test.Infos) != 3 {
		t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos)
	}
	if !api.Semantic.DeepDerivative([]runtime.Object{&pods.Items[0], &pods.Items[1], &rc.Items[0]}, test.Objects()) {
		t.Errorf("unexpected visited objects: %#v", test.Objects())
	}
}

// TestYAMLStream is the YAML-document variant of TestStream.
// NOTE(review): this function continues beyond the end of this chunk.
func TestYAMLStream(t *testing.T) {
	r, pods, rc := streamYAMLTestData()
	b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()).
NamespaceParam("test").Stream(r, "STDIN").Flatten() test := &testVisitor{} singleItemImplied := false err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle) if err != nil || singleItemImplied || len(test.Infos) != 3 { t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos) } if !api.Semantic.DeepDerivative([]runtime.Object{&pods.Items[0], &pods.Items[1], &rc.Items[0]}, test.Objects()) { t.Errorf("unexpected visited objects: %#v", test.Objects()) } } func TestMultipleObject(t *testing.T) { r, pods, svc := streamTestData() obj, err := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()). NamespaceParam("test").Stream(r, "STDIN").Flatten(). Do().Object() if err != nil { t.Fatalf("unexpected error: %v", err) } expected := &api.List{ Items: []runtime.Object{ &pods.Items[0], &pods.Items[1], &svc.Items[0], }, } if !api.Semantic.DeepDerivative(expected, obj) { t.Errorf("unexpected visited objects: %#v", obj) } } func TestContinueOnErrorVisitor(t *testing.T) { r, _, _ := streamTestData() req := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()). ContinueOnError(). NamespaceParam("test").Stream(r, "STDIN").Flatten(). Do() count := 0 testErr := fmt.Errorf("test error") err := req.Visit(func(_ *Info, _ error) error { count++ if count > 1 { return testErr } return nil }) if err == nil { t.Fatalf("unexpected error: %v", err) } if count != 3 { t.Fatalf("did not visit all infos: %d", count) } agg, ok := err.(utilerrors.Aggregate) if !ok { t.Fatalf("unexpected error: %v", err) } if len(agg.Errors()) != 2 || agg.Errors()[0] != testErr || agg.Errors()[1] != testErr { t.Fatalf("unexpected error: %v", err) } } func TestSingleItemImpliedObject(t *testing.T) { obj, err := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()). NamespaceParam("test").DefaultNamespace(). 
FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{"../../../examples/guestbook/legacy/redis-master-controller.yaml"}}). Flatten(). Do().Object() if err != nil { t.Fatalf("unexpected error: %v", err) } rc, ok := obj.(*api.ReplicationController) if !ok { t.Fatalf("unexpected object: %#v", obj) } if rc.Name != "redis-master" || rc.Namespace != "test" { t.Errorf("unexpected controller: %#v", rc) } } func TestSingleItemImpliedObjectNoExtension(t *testing.T) { obj, err := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()). NamespaceParam("test").DefaultNamespace(). FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{"../../../examples/pod"}}). Flatten(). Do().Object() if err != nil { t.Fatalf("unexpected error: %v", err) } pod, ok := obj.(*api.Pod) if !ok { t.Fatalf("unexpected object: %#v", obj) } if pod.Name != "nginx" || pod.Namespace != "test" { t.Errorf("unexpected pod: %#v", pod) } } func TestSingleItemImpliedRootScopedObject(t *testing.T) { node := &api.Node{ObjectMeta: api.ObjectMeta{Name: "test"}, Spec: api.NodeSpec{ExternalID: "test"}} r := streamTestObject(node) infos, err := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()). NamespaceParam("test").DefaultNamespace(). Stream(r, "STDIN"). Flatten(). 
Do().Infos() if err != nil || len(infos) != 1 { t.Fatalf("unexpected error: %v", err) } if infos[0].Namespace != "" { t.Errorf("namespace should be empty: %#v", infos[0]) } n, ok := infos[0].Object.(*api.Node) if !ok { t.Fatalf("unexpected object: %#v", infos[0].Object) } if n.Name != "test" || n.Namespace != "" { t.Errorf("unexpected object: %#v", n) } } func TestListObject(t *testing.T) { pods, _ := testData() labelKey := unversioned.LabelSelectorQueryParam(registered.GroupOrDie(api.GroupName).GroupVersion.String()) b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{ "/namespaces/test/pods?" + labelKey + "=a%3Db": runtime.EncodeOrDie(testapi.Default.Codec(), pods), }), testapi.Default.Codec()). SelectorParam("a=b"). NamespaceParam("test"). ResourceTypeOrNameArgs(true, "pods"). Flatten() obj, err := b.Do().Object() if err != nil { t.Fatalf("unexpected error: %v", err) } list, ok := obj.(*api.List) if !ok { t.Fatalf("unexpected object: %#v", obj) } if list.ResourceVersion != pods.ResourceVersion || len(list.Items) != 2 { t.Errorf("unexpected list: %#v", list) } mapping, err := b.Do().ResourceMapping() if err != nil { t.Fatalf("unexpected error: %v", err) } if mapping.Resource != "pods" { t.Errorf("unexpected resource mapping: %#v", mapping) } } func TestListObjectWithDifferentVersions(t *testing.T) { pods, svc := testData() labelKey := unversioned.LabelSelectorQueryParam(registered.GroupOrDie(api.GroupName).GroupVersion.String()) obj, err := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{ "/namespaces/test/pods?" + labelKey + "=a%3Db": runtime.EncodeOrDie(testapi.Default.Codec(), pods), "/namespaces/test/services?" + labelKey + "=a%3Db": runtime.EncodeOrDie(testapi.Default.Codec(), svc), }), testapi.Default.Codec()). SelectorParam("a=b"). NamespaceParam("test"). ResourceTypeOrNameArgs(true, "pods,services"). Flatten(). 
Do().Object() if err != nil { t.Fatalf("unexpected error: %v", err) } list, ok := obj.(*api.List) if !ok { t.Fatalf("unexpected object: %#v", obj) } // resource version differs between type lists, so it's not possible to get a single version. if list.ResourceVersion != "" || len(list.Items) != 3 { t.Errorf("unexpected list: %#v", list) } } func TestWatch(t *testing.T) { _, svc := testData() w, err := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{ "/watch/namespaces/test/services/redis-master?resourceVersion=12": watchBody(watch.Event{ Type: watch.Added, Object: &svc.Items[0], }), }), testapi.Default.Codec()). NamespaceParam("test").DefaultNamespace(). FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{"../../../examples/guestbook/redis-master-service.yaml"}}).Flatten(). Do().Watch("12") if err != nil { t.Fatalf("unexpected error: %v", err) } defer w.Stop() ch := w.ResultChan() select { case obj := <-ch: if obj.Type != watch.Added { t.Fatalf("unexpected watch event %#v", obj) } service, ok := obj.Object.(*api.Service) if !ok { t.Fatalf("unexpected object: %#v", obj) } if service.Name != "baz" || service.ResourceVersion != "12" { t.Errorf("unexpected service: %#v", service) } } } func TestWatchMultipleError(t *testing.T) { _, err := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()). NamespaceParam("test").DefaultNamespace(). FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{"../../../examples/guestbook/legacy/redis-master-controller.yaml"}}).Flatten(). FilenameParam(false, &FilenameOptions{Recursive: false, Filenames: []string{"../../../examples/guestbook/legacy/redis-master-controller.yaml"}}).Flatten(). 
Do().Watch("") if err == nil { t.Fatalf("unexpected non-error") } } func TestLatest(t *testing.T) { r, _, _ := streamTestData() newPod := &api.Pod{ ObjectMeta: api.ObjectMeta{Name: "foo", Namespace: "test", ResourceVersion: "13"}, } newPod2 := &api.Pod{ ObjectMeta: api.ObjectMeta{Name: "bar", Namespace: "test", ResourceVersion: "14"}, } newSvc := &api.Service{ ObjectMeta: api.ObjectMeta{Name: "baz", Namespace: "test", ResourceVersion: "15"}, } b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClientWith("", t, map[string]string{ "/namespaces/test/pods/foo": runtime.EncodeOrDie(testapi.Default.Codec(), newPod), "/namespaces/test/pods/bar": runtime.EncodeOrDie(testapi.Default.Codec(), newPod2), "/namespaces/test/services/baz": runtime.EncodeOrDie(testapi.Default.Codec(), newSvc), }), testapi.Default.Codec()). NamespaceParam("other").Stream(r, "STDIN").Flatten().Latest() test := &testVisitor{} singleItemImplied := false err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle) if err != nil || singleItemImplied || len(test.Infos) != 3 { t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos) } if !api.Semantic.DeepDerivative([]runtime.Object{newPod, newPod2, newSvc}, test.Objects()) { t.Errorf("unexpected visited objects: %#v", test.Objects()) } } func TestReceiveMultipleErrors(t *testing.T) { pods, svc := testData() r, w := io.Pipe() go func() { defer w.Close() w.Write([]byte(`{}`)) w.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), &pods.Items[0]))) }() r2, w2 := io.Pipe() go func() { defer w2.Close() w2.Write([]byte(`{}`)) w2.Write([]byte(runtime.EncodeOrDie(testapi.Default.Codec(), &svc.Items[0]))) }() b := NewBuilder(testapi.Default.RESTMapper(), api.Scheme, fakeClient(), testapi.Default.Codec()). Stream(r, "1").Stream(r2, "2"). 
ContinueOnError() test := &testVisitor{} singleItemImplied := false err := b.Do().IntoSingleItemImplied(&singleItemImplied).Visit(test.Handle) if err == nil || singleItemImplied || len(test.Infos) != 2 { t.Fatalf("unexpected response: %v %t %#v", err, singleItemImplied, test.Infos) } errs, ok := err.(utilerrors.Aggregate) if !ok { t.Fatalf("unexpected error: %v", reflect.TypeOf(err)) } if len(errs.Errors()) != 2 { t.Errorf("unexpected errors %v", errs) } } func TestHasNames(t *testing.T) { tests := []struct { args []string expectedHasName bool expectedError error }{ { args: []string{""}, expectedHasName: false, expectedError: nil, }, { args: []string{"rc"}, expectedHasName: false, expectedError: nil, }, { args: []string{"rc,pod,svc"}, expectedHasName: false, expectedError: nil, }, { args: []string{"rc/foo"}, expectedHasName: true, expectedError: nil, }, { args: []string{"rc", "foo"}, expectedHasName: true, expectedError: nil, }, { args: []string{"rc,pod,svc", "foo"}, expectedHasName: true, expectedError: nil, }, { args: []string{"rc/foo", "rc/bar", "rc/zee"}, expectedHasName: true, expectedError: nil, }, { args: []string{"rc/foo", "bar"}, expectedHasName: false, expectedError: fmt.Errorf("there is no need to specify a resource type as a separate argument when passing arguments in resource/name form (e.g. 'resource.test get resource/<resource_name>' instead of 'resource.test get resource resource/<resource_name>'"), }, } for _, test := range tests { hasNames, err := HasNames(test.args) if !reflect.DeepEqual(test.expectedError, err) { t.Errorf("expected HasName to error %v, got %s", test.expectedError, err) } if hasNames != test.expectedHasName { t.Errorf("expected HasName to return %v for %s", test.expectedHasName, test.args) } } }
apache-2.0
lukecwik/incubator-beam
sdks/java/testing/test-utils/src/main/java/org/apache/beam/sdk/testutils/metrics/TimeMonitor.java
1700
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.testutils.metrics; import org.apache.beam.sdk.metrics.Distribution; import org.apache.beam.sdk.metrics.Metrics; import org.apache.beam.sdk.transforms.DoFn; /** * Monitor that records processing time distribution in the pipeline. * * <p>To use: apply a monitor directly after each source and sink transform. This will capture a * distribution of element processing timestamps, which can be collected and queried using {@link * org.apache.beam.sdk.testutils.metrics.MetricsReader}. */ public class TimeMonitor<T> extends DoFn<T, T> { private Distribution timeDistribution; public TimeMonitor(String namespace, String name) { this.timeDistribution = Metrics.distribution(namespace, name); } @ProcessElement public void processElement(ProcessContext c) { timeDistribution.update(System.currentTimeMillis()); c.output(c.element()); } }
apache-2.0
N03297857/2017Fall
node_modules/@angular/common/src/directives/ng_for_of.d.ts
4590
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import { DoCheck, IterableDiffers, NgIterable, OnChanges, SimpleChanges, TemplateRef, TrackByFunction, ViewContainerRef } from '@angular/core'; /** * @stable */ export declare class NgForOfContext<T> { $implicit: T; ngForOf: NgIterable<T>; index: number; count: number; constructor($implicit: T, ngForOf: NgIterable<T>, index: number, count: number); readonly first: boolean; readonly last: boolean; readonly even: boolean; readonly odd: boolean; } /** * The `NgForOf` directive instantiates a template once per item from an iterable. The context * for each instantiated template inherits from the outer context with the given loop variable * set to the current item from the iterable. * * ### Local Variables * * `NgForOf` provides several exported values that can be aliased to local variables: * * - `$implicit: T`: The value of the individual items in the iterable (`ngForOf`). * - `ngForOf: NgIterable<T>`: The value of the iterable expression. Useful when the expression is * more complex then a property access, for example when using the async pipe (`userStreams | * async`). * - `index: number`: The index of the current item in the iterable. * - `first: boolean`: True when the item is the first item in the iterable. * - `last: boolean`: True when the item is the last item in the iterable. * - `even: boolean`: True when the item has an even index in the iterable. * - `odd: boolean`: True when the item has an odd index in the iterable. * * ``` * <li *ngFor="let user of userObservable | async as users; index as i; first as isFirst"> * {{i}}/{{users.length}}. 
{{user}} <span *ngIf="isFirst">default</span> * </li> * ``` * * ### Change Propagation * * When the contents of the iterator changes, `NgForOf` makes the corresponding changes to the DOM: * * * When an item is added, a new instance of the template is added to the DOM. * * When an item is removed, its template instance is removed from the DOM. * * When items are reordered, their respective templates are reordered in the DOM. * * Otherwise, the DOM element for that item will remain the same. * * Angular uses object identity to track insertions and deletions within the iterator and reproduce * those changes in the DOM. This has important implications for animations and any stateful * controls (such as `<input>` elements which accept user input) that are present. Inserted rows can * be animated in, deleted rows can be animated out, and unchanged rows retain any unsaved state * such as user input. * * It is possible for the identities of elements in the iterator to change while the data does not. * This can happen, for example, if the iterator produced from an RPC to the server, and that * RPC is re-run. Even if the data hasn't changed, the second response will produce objects with * different identities, and Angular will tear down the entire DOM and rebuild it (as if all old * elements were deleted and all new elements inserted). This is an expensive operation and should * be avoided if possible. * * To customize the default tracking algorithm, `NgForOf` supports `trackBy` option. * `trackBy` takes a function which has two arguments: `index` and `item`. * If `trackBy` is given, Angular tracks changes by the return value of the function. 
* * ### Syntax * * - `<li *ngFor="let item of items; index as i; trackBy: trackByFn">...</li>` * * With `<ng-template>` element: * * ``` * <ng-template ngFor let-item [ngForOf]="items" let-i="index" [ngForTrackBy]="trackByFn"> * <li>...</li> * </ng-template> * ``` * * ### Example * * See a [live demo](http://plnkr.co/edit/KVuXxDp0qinGDyo307QW?p=preview) for a more detailed * example. * * @stable */ export declare class NgForOf<T> implements DoCheck, OnChanges { private _viewContainer; private _template; private _differs; ngForOf: NgIterable<T>; ngForTrackBy: TrackByFunction<T>; private _differ; private _trackByFn; constructor(_viewContainer: ViewContainerRef, _template: TemplateRef<NgForOfContext<T>>, _differs: IterableDiffers); ngForTemplate: TemplateRef<NgForOfContext<T>>; ngOnChanges(changes: SimpleChanges): void; ngDoCheck(): void; private _applyChanges(changes); private _perViewChange(view, record); } export declare function getTypeNameForDebugging(type: any): string;
apache-2.0
bayzid026/TrainingContent
O3652/O3652-7 Deep Dive into Security and OAuth/Completed Projects/Exercise02/OfficeOAuth/OfficeOAuth/CalendarApiSample.cs
1422
using Microsoft.Office365.Exchange; using Microsoft.Office365.OAuth; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace OfficeOAuth { public static class CalendarAPISample { const string ExchangeResourceId = "https://outlook.office365.com"; const string ExchangeServiceRoot = "https://outlook.office365.com/ews/odata"; public static async Task<IOrderedEnumerable<IEvent>> GetCalendarEvents() { var client = await EnsureClientCreated(); // Obtain calendar event data var eventsResults = await (from i in client.Me.Events where i.End >= DateTimeOffset.UtcNow select i).Take(10).ExecuteAsync(); var events = eventsResults.CurrentPage.OrderBy(e => e.Start); return events; } private static async Task<ExchangeClient> EnsureClientCreated() { Authenticator authenticator = new Authenticator(); var authInfo = await authenticator.AuthenticateAsync(ExchangeResourceId); return new ExchangeClient(new Uri(ExchangeServiceRoot), authInfo.GetAccessToken); } public static void SignOut(Uri postLogoutRedirect) { new Authenticator().Logout(postLogoutRedirect); } } }
apache-2.0
anindoasaha/php_nginx
php-5.5.16/ext/standard/tests/array/array_uintersect_uassoc_variation3.phpt
8351
--TEST-- Test array_uintersect_uassoc() function : usage variation --FILE-- <?php /* Prototype : array array_uintersect_uassoc(array arr1, array arr2 [, array ...], callback data_compare_func, callback key_compare_func) * Description: Returns the entries of arr1 that have values which are present in all the other arguments. Keys are used to do more restrictive check. Both data and keys are compared by using user-supplied callbacks. * Source code: ext/standard/array.c * Alias to functions: */ echo "*** Testing array_uintersect_uassoc() : usage variation ***\n"; // Initialise function arguments not being substituted (if any) $arr1 = array(1, 2); $arr2 = array(1, 2); include('compare_function.inc'); $key_compare_func = 'compare_function'; //get an unset variable $unset_var = 10; unset ($unset_var); // define some classes class classWithToString { public function __toString() { return "Class A object"; } } class classWithoutToString { } // heredoc string $heredoc = <<<EOT hello world EOT; // add arrays $index_array = array (1, 2, 3); $assoc_array = array ('one' => 1, 'two' => 2); //array of values to iterate over $inputs = array( // int data 'int 0' => 0, 'int 1' => 1, 'int 12345' => 12345, 'int -12345' => -2345, // float data 'float 10.5' => 10.5, 'float -10.5' => -10.5, 'float 12.3456789000e10' => 12.3456789000e10, 'float -12.3456789000e10' => -12.3456789000e10, 'float .5' => .5, // array data 'empty array' => array(), 'int indexed array' => $index_array, 'associative array' => $assoc_array, 'nested arrays' => array('foo', $index_array, $assoc_array), // null data 'uppercase NULL' => NULL, 'lowercase null' => null, // boolean data 'lowercase true' => true, 'lowercase false' =>false, 'uppercase TRUE' =>TRUE, 'uppercase FALSE' =>FALSE, // empty data 'empty string DQ' => "", 'empty string SQ' => '', // string data 'string DQ' => "string", 'string SQ' => 'string', 'mixed case string' => "sTrInG", 'heredoc' => $heredoc, // object data 'instance of classWithToString' => 
new classWithToString(), 'instance of classWithoutToString' => new classWithoutToString(), // undefined data 'undefined var' => @$undefined_var, // unset data 'unset var' => @$unset_var, ); // loop through each element of the array for data_compare_func foreach($inputs as $key =>$value) { echo "\n--$key--\n"; var_dump( array_uintersect_uassoc($arr1, $arr2, $value, $key_compare_func) ); }; ?> ===DONE=== --EXPECTF-- *** Testing array_uintersect_uassoc() : usage variation *** --int 0-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --int 1-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --int 12345-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --int -12345-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --float 10.5-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --float -10.5-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --float 12.3456789000e10-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --float -12.3456789000e10-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --float .5-- Warning: array_uintersect_uassoc() expects parameter 3 to be 
a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --empty array-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, array must have exactly two members in %sarray_uintersect_uassoc_variation3.php on line %d NULL --int indexed array-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, array must have exactly two members in %sarray_uintersect_uassoc_variation3.php on line %d NULL --associative array-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, first array member is not a valid class name or object in %sarray_uintersect_uassoc_variation3.php on line %d NULL --nested arrays-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, array must have exactly two members in %sarray_uintersect_uassoc_variation3.php on line %d NULL --uppercase NULL-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --lowercase null-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --lowercase true-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --lowercase false-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --uppercase TRUE-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --uppercase FALSE-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL 
--empty string DQ-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, function '' not found or invalid function name in %sarray_uintersect_uassoc_variation3.php on line %d NULL --empty string SQ-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, function '' not found or invalid function name in %sarray_uintersect_uassoc_variation3.php on line %d NULL --string DQ-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, function 'string' not found or invalid function name in %sarray_uintersect_uassoc_variation3.php on line %d NULL --string SQ-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, function 'string' not found or invalid function name in %sarray_uintersect_uassoc_variation3.php on line %d NULL --mixed case string-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, function 'sTrInG' not found or invalid function name in %sarray_uintersect_uassoc_variation3.php on line %d NULL --heredoc-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, function 'hello world' not found or invalid function name in %sarray_uintersect_uassoc_variation3.php on line %d NULL --instance of classWithToString-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --instance of classWithoutToString-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --undefined var-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line %d NULL --unset var-- Warning: array_uintersect_uassoc() expects parameter 3 to be a valid callback, no array or string given in %sarray_uintersect_uassoc_variation3.php on line 
%d NULL ===DONE===
apache-2.0
cswaroop/sql-parser
src/main/java/com/foundationdb/sql/compiler/TimeTypeCompiler.java
5720
/**
 * Copyright 2011-2013 FoundationDB, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* The original from which this derives bore the following: */

/*
   Derby - Class org.apache.derby.impl.sql.compile.TimeTypeCompiler

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to you under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */

package com.foundationdb.sql.compiler;

import com.foundationdb.sql.parser.*;

import com.foundationdb.sql.StandardException;
import com.foundationdb.sql.types.DataTypeDescriptor;
import com.foundationdb.sql.types.TypeId;

import java.sql.Types;

/**
 * Type compiler for the SQL TIME type.
 *
 * Supplies conversion/compatibility rules for TIME, its corresponding
 * Java primitive type, the width TIME casts to when converted to CHAR,
 * and the result types of +/- arithmetic on TIME operands.
 */
public class TimeTypeCompiler extends TypeCompiler
{
    protected TimeTypeCompiler(TypeId typeId) {
        super(typeId);
    }

    /**
     * User types are convertible to other user types only if
     * (for now) they are the same type and are being used to
     * implement some JDBC type. This is sufficient for
     * date/time types; it may be generalized later for e.g.
     * comparison of any user type with one of its subtypes.
     *
     * @see TypeCompiler#convertible
     */
    public boolean convertible(TypeId otherType, boolean forDataTypeFunction) {
        // Any non-LOB, non-LONG VARCHAR string type can be converted to TIME.
        if (otherType.isStringTypeId() &&
            (!otherType.isLOBTypeId()) &&
            !otherType.isLongVarcharTypeId()) {
            return true;
        }
        // If same type, convert always ok.
        return (getStoredFormatIdFromTypeId() == otherType.getTypeFormatId());
    }

    /**
     * Compatibility is defined as convertibility (non-function context).
     *
     * @see TypeCompiler#compatible
     */
    public boolean compatible(TypeId otherType) {
        return convertible(otherType, false);
    }

    /**
     * @see TypeCompiler#getCorrespondingPrimitiveTypeName
     */
    public String getCorrespondingPrimitiveTypeName() {
        return "java.sql.Time";
    }

    /**
     * Get the method name for getting out the corresponding primitive
     * Java type.
     *
     * @return String The method call name for getting the
     *                corresponding primitive Java type.
     */
    public String getPrimitiveMethodName() {
        return "getTime";
    }

    /**
     * @see TypeCompiler#getCastToCharWidth
     */
    public int getCastToCharWidth(DataTypeDescriptor dts) {
        return TypeId.TIME_MAXWIDTH;
    }

    /**
     * Resolve the result type of TIME {+,-} <right operand>.
     *
     * Rules implemented below:
     *  - TIME - TIME                 => INTERVAL HOUR TO SECOND
     *  - TIME - other datetime       => INTERVAL DAY TO SECOND
     *  - TIME +/- sub-day interval   => TIME (nullability merged from operands)
     *  - TIME +/- other interval     => TIMESTAMP
     *  - anything else               => delegated to the superclass (error/unsupported)
     *
     * Note: both switch statements deliberately fall through to the
     * statement AFTER the switch when the case does not return — the
     * post-switch return is the "all other formats" default.
     *
     * @see TypeCompiler#resolveArithmeticOperation
     *
     * @exception StandardException Thrown on error
     */
    public DataTypeDescriptor resolveArithmeticOperation(DataTypeDescriptor leftType,
                                                         DataTypeDescriptor rightType,
                                                         String operator)
            throws StandardException {
        TypeId rightTypeId = rightType.getTypeId();
        // Result is nullable if either operand is nullable.
        boolean nullable = leftType.isNullable() || rightType.isNullable();
        if (rightTypeId.isDateTimeTimeStampTypeId()) {
            if (operator.equals(TypeCompiler.MINUS_OP)) {
                switch (rightTypeId.getTypeFormatId()) {
                case TypeId.FormatIds.TIME_TYPE_ID:
                    // TIME - TIME is INTERVAL HOUR TO SECOND
                    return new DataTypeDescriptor(TypeId.INTERVAL_HOUR_SECOND_ID, nullable);
                }
                // TIME - other datetime is INTERVAL DAY TO SECOND
                return new DataTypeDescriptor(TypeId.INTERVAL_DAY_SECOND_ID, nullable);
            }
        }
        else if (rightTypeId.isIntervalTypeId()) {
            if (operator.equals(TypeCompiler.PLUS_OP) ||
                operator.equals(TypeCompiler.MINUS_OP)) {
                switch (rightTypeId.getTypeFormatId()) {
                case TypeId.FormatIds.INTERVAL_DAY_SECOND_ID:
                    // Only intervals strictly smaller than a day keep the result as TIME.
                    if ((rightTypeId == TypeId.INTERVAL_HOUR_ID) ||
                        (rightTypeId == TypeId.INTERVAL_MINUTE_ID) ||
                        (rightTypeId == TypeId.INTERVAL_SECOND_ID) ||
                        (rightTypeId == TypeId.INTERVAL_HOUR_MINUTE_ID) ||
                        (rightTypeId == TypeId.INTERVAL_HOUR_SECOND_ID) ||
                        (rightTypeId == TypeId.INTERVAL_MINUTE_SECOND_ID))
                        // TIME +/- sub day interval is TIME
                        return leftType.getNullabilityType(nullable);
                }
                // TIME +/- other interval is TIMESTAMP
                return new DataTypeDescriptor(TypeId.TIMESTAMP_ID, nullable);
            }
        }

        // Unsupported
        return super.resolveArithmeticOperation(leftType, rightType, operator);
    }
}
apache-2.0
ThiagoGarciaAlves/jmeter
src/core/org/apache/jmeter/samplers/StatisticalSampleSender.java
8733
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.jmeter.samplers;

import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;

import java.io.ObjectStreamException;
import java.io.Serializable;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Implements batch reporting for remote testing.
 *
 * Sample events are aggregated per key (thread group or thread name) into
 * {@link StatisticalSampleResult}s and flushed to the remote listener when
 * either a sample-count threshold or a time threshold is breached.
 *
 * The instance is created on the client, serialised to the server over RMI,
 * and {@link #readResolve()} decides (via {@code isClientConfigured()})
 * whether client- or server-side threshold settings apply.
 */
public class StatisticalSampleSender extends AbstractSampleSender implements Serializable {
    private static final long serialVersionUID = 240L;

    private static final Logger log = LoggingManager.getLoggerForClass();

    // Fallbacks when the JMeter properties are not set.
    private static final int DEFAULT_NUM_SAMPLE_THRESHOLD = 100;

    private static final long DEFAULT_TIME_THRESHOLD = 60000L;

    // Static fields are set by the server when the class is constructed
    private static final int NUM_SAMPLES_THRESHOLD = JMeterUtils.getPropDefault(
            "num_sample_threshold", DEFAULT_NUM_SAMPLE_THRESHOLD);

    private static final long TIME_THRESHOLD_MS = JMeterUtils.getPropDefault("time_threshold",
            DEFAULT_TIME_THRESHOLD);

    // should the samples be aggregated on thread name or thread group (default) ?
    // NOTE(review): not final, unlike the other two server-side settings —
    // presumably an oversight; confirm before making it final.
    private static boolean KEY_ON_THREADNAME = JMeterUtils.getPropDefault("key_on_threadname", false);

    // Instance fields are constructed by the client when the instance is create in the test plan
    // and the field values are then transferred to the server copy by RMI serialisation/deserialisation
    private final int clientConfiguredNumSamplesThreshold = JMeterUtils.getPropDefault(
            "num_sample_threshold", DEFAULT_NUM_SAMPLE_THRESHOLD);

    private final long clientConfiguredTimeThresholdMs = JMeterUtils.getPropDefault("time_threshold",
            DEFAULT_TIME_THRESHOLD);

    // should the samples be aggregated on thread name or thread group (default) ?
    private final boolean clientConfiguredKeyOnThreadName = JMeterUtils.getPropDefault("key_on_threadname", false);

    // Remote destination for the aggregated batches.
    private final RemoteSampleListener listener;

    // Wrapper events (one per aggregation key) sent on each flush.
    private final List<SampleEvent> sampleStore = new ArrayList<>();

    //@GuardedBy("sampleStore") TODO perhaps use ConcurrentHashMap ?
    private final Map<String, StatisticalSampleResult> sampleTable = new HashMap<>();

    // Settings; readResolve sets these from the server/client values as appropriate
    // TODO would be nice to make these final; not 100% sure volatile is needed as not changed after creation
    private transient volatile int numSamplesThreshold;

    private transient volatile long timeThresholdMs;

    private transient volatile boolean keyOnThreadName;

    // variables maintained by server code
    // @GuardedBy("sampleStore")
    private transient int sampleCount; // maintain separate count of samples for speed

    private transient long batchSendTime = -1; // @GuardedBy("sampleStore")

    /**
     * @deprecated only for use by test code
     */
    @Deprecated
    public StatisticalSampleSender(){
        this(null);
        log.warn("Constructor only intended for use in testing");
    }

    /**
     * Constructor, only called by client code.
     *
     * @param listener that the List of sample events will be sent to.
     */
    StatisticalSampleSender(RemoteSampleListener listener) {
        this.listener = listener;
        if (isClientConfigured()) {
            log.info("Using StatisticalSampleSender (client settings) for this run."
                    + " Thresholds: num=" + clientConfiguredNumSamplesThreshold
                    + ", time=" + clientConfiguredTimeThresholdMs
                    + ". Key uses ThreadName: " + clientConfiguredKeyOnThreadName);
        } else {
            log.info("Using StatisticalSampleSender (server settings) for this run.");
        }
    }

    /**
     * Checks if any sample events are still present in the sampleStore and
     * sends them to the listener. Informs the listener that the test ended.
     *
     * @param host the hostname that the test has ended on.
     */
    @Override
    public void testEnded(String host) {
        log.info("Test Ended on " + host);
        try {
            // NOTE(review): called without holding the sampleStore lock,
            // unlike sampleOccurred() — presumably safe once the test has
            // ended and no more samples arrive; confirm.
            if (sampleStore.size() != 0) {
                sendBatch();
            }
            listener.testEnded(host);
        } catch (RemoteException err) {
            log.warn("testEnded(hostname)", err);
        }
    }

    /**
     * Stores sample events until either a time or sample threshold is
     * breached. Both thresholds are reset if one fires. If only one threshold
     * is set it becomes the only value checked against. When a threshold is
     * breached the list of sample events is sent to a listener where the event
     * are fired locally.
     *
     * @param e a Sample Event
     */
    @Override
    public void sampleOccurred(SampleEvent e) {
        synchronized (sampleStore) {
            // Locate the statistical sample collector
            String key = StatisticalSampleResult.getKey(e, keyOnThreadName);
            StatisticalSampleResult statResult = sampleTable.get(key);
            if (statResult == null) {
                statResult = new StatisticalSampleResult(e.getResult());
                // store the new statistical result collector
                sampleTable.put(key, statResult);
                // add a new wrapper sampleevent
                sampleStore
                        .add(new SampleEvent(statResult, e.getThreadGroup()));
            }
            statResult.add(e.getResult());
            sampleCount++;
            boolean sendNow = false;
            // -1 means "threshold disabled" for both checks below.
            if (numSamplesThreshold != -1) {
                if (sampleCount >= numSamplesThreshold) {
                    sendNow = true;
                }
            }

            long now = 0;
            if (timeThresholdMs != -1) {
                now = System.currentTimeMillis();
                // Checking for and creating initial timestamp to check against
                if (batchSendTime == -1) {
                    this.batchSendTime = now + timeThresholdMs;
                }
                if (batchSendTime < now) {
                    sendNow = true;
                }
            }

            if (sendNow) {
                try {
                    if (log.isDebugEnabled()) {
                        log.debug("Firing sample");
                    }
                    sendBatch();
                    if (timeThresholdMs != -1) {
                        this.batchSendTime = now + timeThresholdMs;
                    }
                } catch (RemoteException err) {
                    log.warn("sampleOccurred", err);
                }
            }
        } // synchronized(sampleStore)
    }

    /**
     * Flushes the accumulated batch to the remote listener and resets the
     * per-batch state (store, aggregation table, sample count).
     * No-op when the store is empty.
     *
     * @throws RemoteException if the remote listener call fails
     */
    private void sendBatch() throws RemoteException {
        if (sampleStore.size() > 0) {
            listener.processBatch(sampleStore);
            sampleStore.clear();
            sampleTable.clear();
            sampleCount = 0;
        }
    }

    /**
     * Processed by the RMI server code; acts as testStarted().
     * @return this
     * @throws ObjectStreamException never
     */
    private Object readResolve() throws ObjectStreamException{
        // Choose which side's configuration wins for this run.
        if (isClientConfigured()) {
            numSamplesThreshold = clientConfiguredNumSamplesThreshold;
            timeThresholdMs = clientConfiguredTimeThresholdMs;
            keyOnThreadName = clientConfiguredKeyOnThreadName;
        } else {
            numSamplesThreshold = NUM_SAMPLES_THRESHOLD;
            timeThresholdMs = TIME_THRESHOLD_MS;
            keyOnThreadName = KEY_ON_THREADNAME;
        }
        log.info("Using StatisticalSampleSender for this run."
                + (isClientConfigured() ? " Client config: " : " Server config: ")
                + " Thresholds: num=" + numSamplesThreshold
                + ", time=" + timeThresholdMs
                + ". Key uses ThreadName: " + keyOnThreadName);
        return this;
    }
}
apache-2.0
ern/elasticsearch
server/src/test/java/org/elasticsearch/index/mapper/IpFieldScriptTests.java
2716
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.index.mapper;

import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.IpFieldScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;

/**
 * Tests for the {@code ip}-typed runtime field script
 * ({@link IpFieldScript}), built on the shared
 * {@link FieldScriptTestCase} harness.
 */
public class IpFieldScriptTests extends FieldScriptTestCase<IpFieldScript.Factory> {
    // Minimal factory used by the base-class tests: emits one fixed IP
    // for every document.
    public static final IpFieldScript.Factory DUMMY = (fieldName, params, lookup) -> ctx -> new IpFieldScript(
        fieldName,
        params,
        lookup,
        ctx
    ) {
        @Override
        public void execute() {
            emit("192.168.0.1");
        }
    };

    @Override
    protected ScriptContext<IpFieldScript.Factory> context() {
        return IpFieldScript.CONTEXT;
    }

    @Override
    protected IpFieldScript.Factory dummyScript() {
        return DUMMY;
    }

    /**
     * A script that emits more than {@link AbstractFieldScript#MAX_VALUES}
     * values for a single document must fail with a descriptive
     * IllegalArgumentException rather than accumulating unbounded output.
     */
    public void testTooManyValues() throws IOException {
        try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
            // One document with an empty _source is enough — the script
            // below ignores the source entirely.
            iw.addDocument(List.of(new StoredField("_source", new BytesRef("{}"))));
            try (DirectoryReader reader = iw.getReader()) {
                IpFieldScript script = new IpFieldScript(
                    "test",
                    Map.of(),
                    new SearchLookup(field -> null, (ft, lookup) -> null),
                    reader.leaves().get(0)
                ) {
                    @Override
                    public void execute() {
                        // Emit MAX_VALUES + 1 values (loop bound is inclusive).
                        for (int i = 0; i <= AbstractFieldScript.MAX_VALUES; i++) {
                            emit("192.168.0.1");
                        }
                    }
                };
                Exception e = expectThrows(IllegalArgumentException.class, script::execute);
                assertThat(
                    e.getMessage(),
                    equalTo("Runtime field [test] is emitting [101] values while the maximum number of values allowed is [100]")
                );
            }
        }
    }
}
apache-2.0
markcoble/droolsjbpm-integration
kie-server-parent/kie-server-api/src/main/java/org/kie/server/api/model/instance/RequestInfoInstance.java
5958
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

package org.kie.server.api.model.instance;

import java.util.Date;
import java.util.Map;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * Transfer object describing an executor request (an asynchronous job):
 * its status, business key, retry/execution counters, command, schedule,
 * request/response data and any errors.
 *
 * JAXB binds by FIELD access, so the defensive copies in the Date
 * accessors below do not affect (de)serialisation.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlRootElement(name = "request-info-instance")
public class RequestInfoInstance {

    @XmlElement(name="request-instance-id")
    private Long id;
    @XmlElement(name="request-status")
    private String status;
    @XmlElement(name="request-business-key")
    private String businessKey;
    @XmlElement(name="request-message")
    private String message;
    @XmlElement(name="request-retries")
    private Integer retries;
    @XmlElement(name="request-executions")
    private Integer executions;
    @XmlElement(name="request-command")
    private String commandName;
    @XmlElement(name="request-scheduled-date")
    private Date scheduledDate;
    @XmlElement(name="request-data")
    private Map<String, Object> data;
    @XmlElement(name="response-data")
    private Map<String, Object> responseData;
    @XmlElement(name="request-errors")
    private ErrorInfoInstanceList errors;

    public RequestInfoInstance() {
    }

    public static Builder builder() {
        return new Builder();
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getBusinessKey() {
        return businessKey;
    }

    public void setBusinessKey(String businessKey) {
        this.businessKey = businessKey;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public Integer getRetries() {
        return retries;
    }

    public void setRetries(Integer retries) {
        this.retries = retries;
    }

    public Integer getExecutions() {
        return executions;
    }

    public void setExecutions(Integer executions) {
        this.executions = executions;
    }

    public String getCommandName() {
        return commandName;
    }

    public void setCommandName(String commandName) {
        this.commandName = commandName;
    }

    public Date getScheduledDate() {
        // Defensive copy: java.util.Date is mutable, so never hand out the
        // internal instance (fixes EI_EXPOSE_REP; the Builder already copied).
        return scheduledDate == null ? null : new Date(scheduledDate.getTime());
    }

    public void setScheduledDate(Date scheduledDate) {
        // Defensive copy for the same reason (EI_EXPOSE_REP2), keeping the
        // setter consistent with Builder.scheduledDate().
        this.scheduledDate = scheduledDate == null ? null : new Date(scheduledDate.getTime());
    }

    public Map<String, Object> getData() {
        return data;
    }

    public void setData(Map<String, Object> data) {
        this.data = data;
    }

    public ErrorInfoInstanceList getErrors() {
        return errors;
    }

    public void setErrors(ErrorInfoInstanceList errors) {
        this.errors = errors;
    }

    public Map<String, Object> getResponseData() {
        return responseData;
    }

    public void setResponseData(Map<String, Object> responseData) {
        this.responseData = responseData;
    }

    @Override
    public String toString() {
        return "RequestInfoInstance{" +
                "id=" + id +
                ", status='" + status + '\'' +
                ", businessKey='" + businessKey + '\'' +
                ", retries=" + retries +
                ", executions=" + executions +
                ", commandName='" + commandName + '\'' +
                ", scheduledDate=" + scheduledDate +
                '}';
    }

    /**
     * Fluent builder for {@link RequestInfoInstance}; each method delegates
     * to the corresponding setter and returns this builder.
     */
    public static class Builder {

        private RequestInfoInstance requestInfoInstance = new RequestInfoInstance();

        public RequestInfoInstance build() {
            return requestInfoInstance;
        }

        public Builder id(Long id) {
            requestInfoInstance.setId(id);
            return this;
        }

        public Builder status(String status) {
            requestInfoInstance.setStatus(status);
            return this;
        }

        public Builder businessKey(String businessKey) {
            requestInfoInstance.setBusinessKey(businessKey);
            return this;
        }

        public Builder message(String message) {
            requestInfoInstance.setMessage(message);
            return this;
        }

        public Builder retries(Integer retries) {
            requestInfoInstance.setRetries(retries);
            return this;
        }

        public Builder executions(Integer executions) {
            requestInfoInstance.setExecutions(executions);
            return this;
        }

        public Builder command(String command) {
            requestInfoInstance.setCommandName(command);
            return this;
        }

        public Builder scheduledDate(Date date) {
            // The setter performs the defensive copy.
            requestInfoInstance.setScheduledDate(date);
            return this;
        }

        public Builder data(Map<String, Object> data) {
            requestInfoInstance.setData(data);
            return this;
        }

        public Builder responseData(Map<String, Object> data) {
            requestInfoInstance.setResponseData(data);
            return this;
        }

        public Builder errors(ErrorInfoInstanceList errors) {
            requestInfoInstance.setErrors(errors);
            return this;
        }
    }
}
apache-2.0
telerik/phabricator
src/applications/calendar/storage/__tests__/PhabricatorCalendarHolidayTestCase.php
1038
<?php

/**
 * Exercises PhabricatorCalendarHoliday::getNthBusinessDay() against a
 * storage fixture seeded with a single holiday (2012-01-02).
 */
final class PhabricatorCalendarHolidayTestCase extends PhabricatorTestCase {

  protected function getPhabricatorTestCaseConfiguration() {
    $configuration = array();
    $configuration[self::PHABRICATOR_TESTCONFIG_BUILD_STORAGE_FIXTURES] = true;
    return $configuration;
  }

  protected function willRunTests() {
    parent::willRunTests();

    // Seed one holiday so business-day arithmetic has a non-weekend,
    // non-business day to skip over.
    $holiday = new PhabricatorCalendarHoliday();
    $holiday
      ->setDay('2012-01-02')
      ->setName('International Testing Day')
      ->save();
  }

  public function testNthBusinessDay() {
    // Each case: start date, offset in business days, expected result.
    $cases = array(
      array('2011-12-30', 1, '2012-01-03'),
      array('2012-01-01', 1, '2012-01-03'),
      array('2012-01-01', 0, '2012-01-01'),
      array('2012-01-01', -1, '2011-12-30'),
      array('2012-01-04', -1, '2012-01-03'),
    );

    foreach ($cases as list($date, $n, $expect)) {
      $result = PhabricatorCalendarHoliday::getNthBusinessDay(
        strtotime($date),
        $n);

      $this->assertEqual(
        $expect,
        date('Y-m-d', $result),
        "{$n} business days since '{$date}'");
    }
  }

}
apache-2.0
passerbyid/homebrew-core
Formula/hfstospell.rb
1127
# Homebrew formula for HFST ospell, the Helsinki Finite-State Technology
# spell checker, built from the upstream SourceForge release tarball.
class Hfstospell < Formula
  desc "Helsinki Finite-State Technology ospell"
  homepage "http://www.ling.helsinki.fi/kieliteknologia/tutkimus/hfst/"
  url "https://downloads.sourceforge.net/project/hfst/hfst/archive/hfstospell-0.3.0.tar.gz"
  sha256 "07b5b368882cac2399edb1bb6e2dd91450b56f732c25413a19fcfe194342d70c"

  bottle do
    cellar :any
    sha256 "70fe81e5ba05136921bf47c9e29053d1ab0c23ad729bc47daafc8b9fb0a311ab" => :sierra
    sha256 "9417cec27aed563db269d83402af875161724b10297ab78c4e69e4811c60866a" => :el_capitan
    sha256 "4dcc41f94c027f765b2d8e9e3859a72797d1d2f2e0e59b8f33ef47831dbcefea" => :yosemite
    sha256 "87cfbe776c920c653c7baf52d8492e6f2fc19a3c440026d09f0a8c05e3c26a87" => :mavericks
  end

  depends_on "pkg-config" => :build
  depends_on "libarchive"

  # The sources are C++11; require a C++11-capable compiler.
  needs :cxx11

  def install
    ENV.cxx11
    # Standard autotools build into the Cellar prefix.
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    # Smoke test: the installed binary runs and prints its version.
    system "#{bin}/hfst-ospell", "--version"
  end
end
bsd-2-clause
ASMlover/study
3rdparty/boost/include/boost/filesystem/path_traits.hpp
10379
//  filesystem path_traits.hpp  --------------------------------------------------------//

//  Copyright Beman Dawes 2009

//  Distributed under the Boost Software License, Version 1.0.
//  See http://www.boost.org/LICENSE_1_0.txt

//  Library home page: http://www.boost.org/libs/filesystem

#ifndef BOOST_FILESYSTEM_PATH_TRAITS_HPP
#define BOOST_FILESYSTEM_PATH_TRAITS_HPP

#include <boost/config.hpp>

# if defined( BOOST_NO_STD_WSTRING )
#   error Configuration not supported: Boost.Filesystem V3 and later requires std::wstring support
# endif

#include <boost/filesystem/config.hpp>
#include <boost/type_traits/is_array.hpp>
#include <boost/type_traits/decay.hpp>
#include <boost/system/error_code.hpp>
#include <boost/core/enable_if.hpp>
#include <cwchar>  // for mbstate_t
#include <string>
#include <vector>
#include <list>
#include <iterator>
#include <locale>
#include <boost/assert.hpp>
// #include <iostream>    //**** comment me out ****

#include <boost/config/abi_prefix.hpp>  // must be the last #include

namespace boost { namespace filesystem {

  BOOST_FILESYSTEM_DECL const system::error_category& codecvt_error_category();
  //  uses std::codecvt_base::result used for error codes:
  //
  //    ok:       Conversion successful.
  //    partial:  Not all source characters converted; one or more additional source
  //              characters are needed to produce the final target character, or the
  //              size of the target intermediate buffer was too small to hold the result.
  //    error:    A character in the source could not be converted to the target encoding.
  //    noconv:   The source and target characters have the same type and encoding, so no
  //              conversion was necessary.

  class directory_entry;

//  The path_traits machinery below supports class path's member templates:
//  is_pathable gates which source types are accepted, empty() tests a
//  source for emptiness, convert() performs (possibly codecvt-mediated)
//  narrow/wide conversion, and dispatch() routes a source of any supported
//  shape to the right convert() overload. Declaration order matters for
//  overload resolution; do not reorder.
namespace path_traits {

  typedef std::codecvt<wchar_t, char, std::mbstate_t> codecvt_type;

  //  is_pathable type trait; allows disabling over-agressive class path member templates

  template <class T>
  struct is_pathable { static const bool value = false; };

  template<> struct is_pathable<char*>                  { static const bool value = true; };
  template<> struct is_pathable<const char*>            { static const bool value = true; };
  template<> struct is_pathable<wchar_t*>               { static const bool value = true; };
  template<> struct is_pathable<const wchar_t*>         { static const bool value = true; };
  template<> struct is_pathable<std::string>            { static const bool value = true; };
  template<> struct is_pathable<std::wstring>           { static const bool value = true; };
  template<> struct is_pathable<std::vector<char> >     { static const bool value = true; };
  template<> struct is_pathable<std::vector<wchar_t> >  { static const bool value = true; };
  template<> struct is_pathable<std::list<char> >       { static const bool value = true; };
  template<> struct is_pathable<std::list<wchar_t> >    { static const bool value = true; };
  template<> struct is_pathable<directory_entry>        { static const bool value = true; };

  //  Pathable empty

  template <class Container> inline
    // disable_if aids broken compilers (IBM, old GCC, etc.) and is harmless for
    // conforming compilers. Replace by plain "bool" at some future date (2012?)
    typename boost::disable_if<boost::is_array<Container>, bool>::type
      empty(const Container & c)
        { return c.begin() == c.end(); }

  template <class T> inline
    bool empty(T * const & c_str)
  {
    BOOST_ASSERT(c_str);
    return !*c_str;
  }

  template <typename T, size_t N> inline
    bool empty(T (&x)[N])
      { return !x[0]; }

  // value types differ  ---------------------------------------------------------------//
  //
  //   A from_end argument of 0 is less efficient than a known end, so use only if needed

  //  with codecvt

  BOOST_FILESYSTEM_DECL
  void convert(const char* from,
               const char* from_end,    // 0 for null terminated MBCS
               std::wstring & to,
               const codecvt_type& cvt);

  BOOST_FILESYSTEM_DECL
  void convert(const wchar_t* from,
               const wchar_t* from_end,  // 0 for null terminated MBCS
               std::string & to,
               const codecvt_type& cvt);

  inline
  void convert(const char* from,
               std::wstring & to,
               const codecvt_type& cvt)
  {
    BOOST_ASSERT(from);
    convert(from, 0, to, cvt);
  }

  inline
  void convert(const wchar_t* from,
               std::string & to,
               const codecvt_type& cvt)
  {
    BOOST_ASSERT(from);
    convert(from, 0, to, cvt);
  }

  //  without codecvt
  //  (declared here; definitions are provided elsewhere, not in this header)

  inline
  void convert(const char* from,
               const char* from_end,    // 0 for null terminated MBCS
               std::wstring & to);

  inline
  void convert(const wchar_t* from,
               const wchar_t* from_end,  // 0 for null terminated MBCS
               std::string & to);

  inline
  void convert(const char* from, std::wstring & to);

  inline
  void convert(const wchar_t* from, std::string & to);

  // value types same  -----------------------------------------------------------------//
  //  Same-type "conversions" reduce to plain appends; the codecvt argument,
  //  where present, is accepted but unused.

  //  char with codecvt

  inline
  void convert(const char* from, const char* from_end, std::string & to,
               const codecvt_type&)
  {
    BOOST_ASSERT(from);
    BOOST_ASSERT(from_end);
    to.append(from, from_end);
  }

  inline
  void convert(const char* from, std::string & to, const codecvt_type&)
  {
    BOOST_ASSERT(from);
    to += from;
  }

  //  wchar_t with codecvt

  inline
  void convert(const wchar_t* from, const wchar_t* from_end, std::wstring & to,
               const codecvt_type&)
  {
    BOOST_ASSERT(from);
    BOOST_ASSERT(from_end);
    to.append(from, from_end);
  }

  inline
  void convert(const wchar_t* from, std::wstring & to, const codecvt_type&)
  {
    BOOST_ASSERT(from);
    to += from;
  }

  //  char without codecvt

  inline
  void convert(const char* from, const char* from_end, std::string & to)
  {
    BOOST_ASSERT(from);
    BOOST_ASSERT(from_end);
    to.append(from, from_end);
  }

  inline
  void convert(const char* from, std::string & to)
  {
    BOOST_ASSERT(from);
    to += from;
  }

  //  wchar_t without codecvt

  inline
  void convert(const wchar_t* from, const wchar_t* from_end, std::wstring & to)
  {
    BOOST_ASSERT(from);
    BOOST_ASSERT(from_end);
    to.append(from, from_end);
  }

  inline
  void convert(const wchar_t* from, std::wstring & to)
  {
    BOOST_ASSERT(from);
    to += from;
  }

  //  Source dispatch  -----------------------------------------------------------------//

  //  contiguous containers with codecvt
  template <class U> inline
    void dispatch(const std::string& c, U& to, const codecvt_type& cvt)
      { if (c.size()) convert(&*c.begin(), &*c.begin() + c.size(), to, cvt); }
  template <class U> inline
    void dispatch(const std::wstring& c, U& to, const codecvt_type& cvt)
      { if (c.size()) convert(&*c.begin(), &*c.begin() + c.size(), to, cvt); }
  template <class U> inline
    void dispatch(const std::vector<char>& c, U& to, const codecvt_type& cvt)
      { if (c.size()) convert(&*c.begin(), &*c.begin() + c.size(), to, cvt); }
  template <class U> inline
    void dispatch(const std::vector<wchar_t>& c, U& to, const codecvt_type& cvt)
      { if (c.size()) convert(&*c.begin(), &*c.begin() + c.size(), to, cvt); }

  //  contiguous containers without codecvt
  template <class U> inline
    void dispatch(const std::string& c, U& to)
      { if (c.size()) convert(&*c.begin(), &*c.begin() + c.size(), to); }
  template <class U> inline
    void dispatch(const std::wstring& c, U& to)
      { if (c.size()) convert(&*c.begin(), &*c.begin() + c.size(), to); }
  template <class U> inline
    void dispatch(const std::vector<char>& c, U& to)
      { if (c.size()) convert(&*c.begin(), &*c.begin() + c.size(), to); }
  template <class U> inline
    void dispatch(const std::vector<wchar_t>& c, U& to)
      { if (c.size()) convert(&*c.begin(), &*c.begin() + c.size(), to); }

  //  non-contiguous containers with codecvt
  //  (copies the container into a temporary basic_string to obtain
  //   contiguous storage before converting)
  template <class Container, class U> inline
    // disable_if aids broken compilers (IBM, old GCC, etc.) and is harmless for
    // conforming compilers. Replace by plain "void" at some future date (2012?)
    typename boost::disable_if<boost::is_array<Container>, void>::type
      dispatch(const Container & c, U& to, const codecvt_type& cvt)
  {
    if (c.size())
    {
      std::basic_string<typename Container::value_type> s(c.begin(), c.end());
      convert(s.c_str(), s.c_str()+s.size(), to, cvt);
    }
  }

  //  c_str
  template <class T, class U> inline
    void dispatch(T * const & c_str, U& to, const codecvt_type& cvt)
  {
    //    std::cout << "dispatch() const T *\n";
    BOOST_ASSERT(c_str);
    convert(c_str, to, cvt);
  }

  //  Note: there is no dispatch on C-style arrays because the array may
  //  contain a string smaller than the array size.

  BOOST_FILESYSTEM_DECL
  void dispatch(const directory_entry & de,
#                ifdef BOOST_WINDOWS_API
                 std::wstring & to,
#                else
                 std::string & to,
#                endif
                 const codecvt_type&);

  //  non-contiguous containers without codecvt
  template <class Container, class U> inline
    // disable_if aids broken compilers (IBM, old GCC, etc.) and is harmless for
    // conforming compilers. Replace by plain "void" at some future date (2012?)
    typename boost::disable_if<boost::is_array<Container>, void>::type
      dispatch(const Container & c, U& to)
  {
    if (c.size())
    {
      std::basic_string<typename Container::value_type> seq(c.begin(), c.end());
      convert(seq.c_str(), seq.c_str()+seq.size(), to);
    }
  }

  //  c_str
  template <class T, class U> inline
    void dispatch(T * const & c_str, U& to)
  {
    //    std::cout << "dispatch() const T *\n";
    BOOST_ASSERT(c_str);
    convert(c_str, to);
  }

  //  Note: there is no dispatch on C-style arrays because the array may
  //  contain a string smaller than the array size.

  BOOST_FILESYSTEM_DECL
  void dispatch(const directory_entry & de,
#                ifdef BOOST_WINDOWS_API
                 std::wstring & to
#                else
                 std::string & to
#                endif
                 );

}}}  // namespace boost::filesystem::path_traits

#include <boost/config/abi_suffix.hpp>  // pops abi_prefix.hpp pragmas

#endif  // BOOST_FILESYSTEM_PATH_TRAITS_HPP
bsd-2-clause
BenjaminHCCarr/homebrew-cask
Casks/beacon-scanner.rb
454
# Homebrew Cask for BeaconScanner, a macOS utility that scans for
# iBeacon-compatible devices; distributed via GitHub releases.
cask "beacon-scanner" do
  version "1.1.13"
  sha256 "5b28a510f557e57e5cf48490e982aadaba1676c34192e7d650da110a88d233bd"

  url "https://github.com/mlwelles/BeaconScanner/releases/download/#{version}/Beacon.Scanner.zip"
  # New versions are detected from the GitHub releases feed.
  appcast "https://github.com/mlwelles/BeaconScanner/releases.atom"
  name "BeaconScanner"
  desc "Utility to scan for iBeacon-compatible devices"
  homepage "https://github.com/mlwelles/BeaconScanner/"

  app "Beacon Scanner.app"
end
bsd-2-clause
krattai/ss-middleware
mageplus/app/code/core/Mage/Adminhtml/Block/Catalog/Product/Helper/Form/Apply.php
2424
<?php
/**
 * Magento
 *
 * NOTICE OF LICENSE
 *
 * This source file is subject to the Open Software License (OSL 3.0)
 * that is bundled with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://opensource.org/licenses/osl-3.0.php
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to [email protected] so we can send you a copy immediately.
 *
 * DISCLAIMER
 *
 * Do not edit or add to this file if you wish to upgrade Magento to newer
 * versions in the future. If you wish to customize Magento for your
 * needs please refer to http://www.magentocommerce.com for more information.
 *
 * @category    Mage
 * @package     Mage_Adminhtml
 * @copyright   Copyright (c) 2012 Magento Inc. (http://www.magentocommerce.com)
 * @license     http://opensource.org/licenses/osl-3.0.php  Open Software License (OSL 3.0)
 */

/**
 * Attribute form apply element
 *
 * Renders an "apply to" mode selector ("all" / "custom") in front of the
 * parent multiselect of product types the attribute applies to.
 *
 * @category   Mage
 * @package    Mage_Adminhtml
 * @author     Magento Core Team <[email protected]>
 */
class Mage_Adminhtml_Block_Catalog_Product_Helper_Form_Apply extends Varien_Data_Form_Element_Multiselect
{
    /**
     * Render the mode <select> followed by the parent multiselect HTML.
     *
     * @return string
     */
    public function getElementHtml()
    {
        $elementAttributeHtml = '';

        // Propagate readonly/disabled state onto the mode selector as well.
        if ($this->getReadonly()) {
            $elementAttributeHtml = $elementAttributeHtml . ' readonly="readonly"';
        }

        if ($this->getDisabled()) {
            $elementAttributeHtml = $elementAttributeHtml . ' disabled="disabled"';
        }

        // NOTE(review): loose == comparison — values loosely equal to null
        // (e.g. empty string, 0) also render the "all" option as selected;
        // confirm this is the intended behaviour before tightening to !==.
        $html = '<select onchange="toggleApplyVisibility(this)"' . $elementAttributeHtml . '>'
            . '<option value="0">' . $this->getModeLabels('all') . '</option>'
            . '<option value="1" ' . ($this->getValue()==null ? '' : 'selected') . '>'
            . $this->getModeLabels('custom') . '</option>'
            . '</select><br /><br />';
        $html .= parent::getElementHtml();
        return $html;
    }

    /**
     * Duplicate interface of Varien_Data_Form_Element_Abstract::setReadonly
     *
     * @param bool $readonly
     * @param bool $useDisabled
     * @return Mage_Adminhtml_Block_Catalog_Product_Helper_Form_Apply
     */
    public function setReadonly($readonly, $useDisabled = false)
    {
        $this->setData('readonly', $readonly);
        $this->setData('disabled', $useDisabled);
        return $this;
    }
}
bsd-2-clause
qizenguf/MLC-STT
src/cpu/testers/directedtest/SeriesRequestGenerator.cc
4092
/*
 * Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
 * Copyright (c) 2009-2010 Advanced Micro Devices, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "cpu/testers/directedtest/SeriesRequestGenerator.hh"

#include "base/random.hh"
#include "base/trace.hh"
#include "cpu/testers/directedtest/DirectedGenerator.hh"
#include "cpu/testers/directedtest/RubyDirectedTester.hh"
#include "debug/DirectedTest.hh"

// Directed-test generator that walks each CPU in turn through the same
// address, then advances the address by addr_increment_size and starts a
// new cycle at CPU 0.
SeriesRequestGenerator::SeriesRequestGenerator(const Params *p)
    : DirectedGenerator(p),
      m_addr_increment_size(p->addr_increment_size),
      m_percent_writes(p->percent_writes)
{
    m_status = SeriesRequestGeneratorStatus_Thinking;
    m_active_node = 0;
    m_address = 0x0;
}

SeriesRequestGenerator::~SeriesRequestGenerator()
{
}

// Issue a single timing request (randomly read or write, per
// m_percent_writes) from the currently active CPU's port. Returns true on
// successful issue; on failure the packet is deleted and false returned so
// the caller can retry later.
bool
SeriesRequestGenerator::initiate()
{
    DPRINTF(DirectedTest, "initiating request\n");
    assert(m_status == SeriesRequestGeneratorStatus_Thinking);

    MasterPort* port = m_directed_tester->getCpuPort(m_active_node);

    Request::Flags flags;

    // For simplicity, requests are assumed to be 1 byte-sized
    Request *req = new Request(m_address, 1, flags, masterId);

    Packet::Command cmd;
    bool do_write = (random_mt.random(0, 100) < m_percent_writes);
    if (do_write) {
        cmd = MemCmd::WriteReq;
    } else {
        cmd = MemCmd::ReadReq;
    }

    PacketPtr pkt = new Packet(req, cmd);
    pkt->allocate();

    if (port->sendTimingReq(pkt)) {
        DPRINTF(DirectedTest, "initiating request - successful\n");
        m_status = SeriesRequestGeneratorStatus_Request_Pending;
        return true;
    } else {
        // If the packet did not issue, must delete
        // Note: No need to delete the data, the packet destructor
        // will delete it
        delete pkt->req;
        delete pkt;

        DPRINTF(DirectedTest, "failed to initiate request - sequencer not ready\n");
        return false;
    }
}

// Completion callback: verifies the response matches the outstanding
// request, then moves on to the next CPU (or the next address once every
// CPU has completed the current one).
void
SeriesRequestGenerator::performCallback(uint32_t proc, Addr address)
{
    assert(m_active_node == proc);
    assert(m_address == address);
    assert(m_status == SeriesRequestGeneratorStatus_Request_Pending);

    m_status = SeriesRequestGeneratorStatus_Thinking;
    m_active_node++;
    if (m_active_node == m_num_cpus) {
        //
        // Cycle of requests completed, increment cycle completions and restart
        // at cpu zero
        //
        m_directed_tester->incrementCycleCompletions();
        m_address += m_addr_increment_size;
        m_active_node = 0;
    }
}

SeriesRequestGenerator *
SeriesRequestGeneratorParams::create()
{
    return new SeriesRequestGenerator(this);
}
bsd-3-clause
pozdnyakov/chromium-crosswalk
chrome/renderer/resources/extensions/ad_view_deny.js
570
// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. var watchForTag = require("tagWatcher").watchForTag; // Output error message to console when using <adview> tag with no permission. var errorMessage = "You do not have permission to use <adview> tag." + " Be sure to declare 'adview' permission in your manifest."; window.addEventListener('DOMContentLoaded', function() { watchForTag('ADVIEW', function() { console.error(errorMessage); }); });
bsd-3-clause
yanqingmen/re2
re2/testing/parse_test.cc
16380
// Copyright 2006 The RE2 Authors. All Rights Reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Test parse.cc, dump.cc, and tostring.cc. #include <string> #include <vector> #include "util/test.h" #include "re2/regexp.h" namespace re2 { static const Regexp::ParseFlags TestZeroFlags = Regexp::ParseFlags(1<<30); struct Test { const char* regexp; const char* parse; Regexp::ParseFlags flags; }; static Regexp::ParseFlags kTestFlags = Regexp::MatchNL | Regexp::PerlX | Regexp::PerlClasses | Regexp::UnicodeGroups; static Test tests[] = { // Base cases { "a", "lit{a}" }, { "a.", "cat{lit{a}dot{}}" }, { "a.b", "cat{lit{a}dot{}lit{b}}" }, { "ab", "str{ab}" }, { "a.b.c", "cat{lit{a}dot{}lit{b}dot{}lit{c}}" }, { "abc", "str{abc}" }, { "a|^", "alt{lit{a}bol{}}" }, { "a|b", "cc{0x61-0x62}" }, { "(a)", "cap{lit{a}}" }, { "(a)|b", "alt{cap{lit{a}}lit{b}}" }, { "a*", "star{lit{a}}" }, { "a+", "plus{lit{a}}" }, { "a?", "que{lit{a}}" }, { "a{2}", "rep{2,2 lit{a}}" }, { "a{2,3}", "rep{2,3 lit{a}}" }, { "a{2,}", "rep{2,-1 lit{a}}" }, { "a*?", "nstar{lit{a}}" }, { "a+?", "nplus{lit{a}}" }, { "a??", "nque{lit{a}}" }, { "a{2}?", "nrep{2,2 lit{a}}" }, { "a{2,3}?", "nrep{2,3 lit{a}}" }, { "a{2,}?", "nrep{2,-1 lit{a}}" }, { "", "emp{}" }, { "|", "emp{}" }, // alt{emp{}emp{}} but got factored { "|x|", "alt{emp{}lit{x}emp{}}" }, { ".", "dot{}" }, { "^", "bol{}" }, { "$", "eol{}" }, { "\\|", "lit{|}" }, { "\\(", "lit{(}" }, { "\\)", "lit{)}" }, { "\\*", "lit{*}" }, { "\\+", "lit{+}" }, { "\\?", "lit{?}" }, { "{", "lit{{}" }, { "}", "lit{}}" }, { "\\.", "lit{.}" }, { "\\^", "lit{^}" }, { "\\$", "lit{$}" }, { "\\\\", "lit{\\}" }, { "[ace]", "cc{0x61 0x63 0x65}" }, { "[abc]", "cc{0x61-0x63}" }, { "[a-z]", "cc{0x61-0x7a}" }, { "[a]", "lit{a}" }, { "\\-", "lit{-}" }, { "-", "lit{-}" }, { "\\_", "lit{_}" }, // Posix and Perl extensions { "[[:lower:]]", "cc{0x61-0x7a}" }, { "[a-z]", "cc{0x61-0x7a}" }, { "[^[:lower:]]", "cc{0-0x60 
0x7b-0x10ffff}" }, { "[[:^lower:]]", "cc{0-0x60 0x7b-0x10ffff}" }, { "(?i)[[:lower:]]", "cc{0x41-0x5a 0x61-0x7a 0x17f 0x212a}" }, { "(?i)[a-z]", "cc{0x41-0x5a 0x61-0x7a 0x17f 0x212a}" }, { "(?i)[^[:lower:]]", "cc{0-0x40 0x5b-0x60 0x7b-0x17e 0x180-0x2129 0x212b-0x10ffff}" }, { "(?i)[[:^lower:]]", "cc{0-0x40 0x5b-0x60 0x7b-0x17e 0x180-0x2129 0x212b-0x10ffff}" }, { "\\d", "cc{0x30-0x39}" }, { "\\D", "cc{0-0x2f 0x3a-0x10ffff}" }, { "\\s", "cc{0x9-0xa 0xc-0xd 0x20}" }, { "\\S", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}" }, { "\\w", "cc{0x30-0x39 0x41-0x5a 0x5f 0x61-0x7a}" }, { "\\W", "cc{0-0x2f 0x3a-0x40 0x5b-0x5e 0x60 0x7b-0x10ffff}" }, { "(?i)\\w", "cc{0x30-0x39 0x41-0x5a 0x5f 0x61-0x7a 0x17f 0x212a}" }, { "(?i)\\W", "cc{0-0x2f 0x3a-0x40 0x5b-0x5e 0x60 0x7b-0x17e 0x180-0x2129 0x212b-0x10ffff}" }, { "[^\\\\]", "cc{0-0x5b 0x5d-0x10ffff}" }, { "\\C", "byte{}" }, // Unicode, negatives, and a double negative. { "\\p{Braille}", "cc{0x2800-0x28ff}" }, { "\\P{Braille}", "cc{0-0x27ff 0x2900-0x10ffff}" }, { "\\p{^Braille}", "cc{0-0x27ff 0x2900-0x10ffff}" }, { "\\P{^Braille}", "cc{0x2800-0x28ff}" }, // More interesting regular expressions. { "a{,2}", "str{a{,2}}" }, { "\\.\\^\\$\\\\", "str{.^$\\}" }, { "[a-zABC]", "cc{0x41-0x43 0x61-0x7a}" }, { "[^a]", "cc{0-0x60 0x62-0x10ffff}" }, { "[\xce\xb1-\xce\xb5\xe2\x98\xba]", "cc{0x3b1-0x3b5 0x263a}" }, // utf-8 { "a*{", "cat{star{lit{a}}lit{{}}" }, // Test precedences { "(?:ab)*", "star{str{ab}}" }, { "(ab)*", "star{cap{str{ab}}}" }, { "ab|cd", "alt{str{ab}str{cd}}" }, { "a(b|c)d", "cat{lit{a}cap{cc{0x62-0x63}}lit{d}}" }, // Test flattening. 
{ "(?:a)", "lit{a}" }, { "(?:ab)(?:cd)", "str{abcd}" }, { "(?:a|b)|(?:c|d)", "cc{0x61-0x64}" }, { "a|c", "cc{0x61 0x63}" }, { "a|[cd]", "cc{0x61 0x63-0x64}" }, { "a|.", "dot{}" }, { "[ab]|c", "cc{0x61-0x63}" }, { "[ab]|[cd]", "cc{0x61-0x64}" }, { "[ab]|.", "dot{}" }, { ".|c", "dot{}" }, { ".|[cd]", "dot{}" }, { ".|.", "dot{}" }, // Test Perl quoted literals { "\\Q+|*?{[\\E", "str{+|*?{[}" }, { "\\Q+\\E+", "plus{lit{+}}" }, { "\\Q\\\\E", "lit{\\}" }, { "\\Q\\\\\\E", "str{\\\\}" }, // Test Perl \A and \z { "(?m)^", "bol{}" }, { "(?m)$", "eol{}" }, { "(?-m)^", "bot{}" }, { "(?-m)$", "eot{}" }, { "(?m)\\A", "bot{}" }, { "(?m)\\z", "eot{\\z}" }, { "(?-m)\\A", "bot{}" }, { "(?-m)\\z", "eot{\\z}" }, // Test named captures { "(?P<name>a)", "cap{name:lit{a}}" }, // Case-folded literals { "[Aa]", "litfold{a}" }, // Strings { "abcde", "str{abcde}" }, { "[Aa][Bb]cd", "cat{strfold{ab}str{cd}}" }, // Reported bug involving \n leaking in despite use of NeverNL. { "[^ ]", "cc{0-0x9 0xb-0x1f 0x21-0x10ffff}", TestZeroFlags }, { "[^ ]", "cc{0-0x9 0xb-0x1f 0x21-0x10ffff}", Regexp::FoldCase }, { "[^ ]", "cc{0-0x9 0xb-0x1f 0x21-0x10ffff}", Regexp::NeverNL }, { "[^ ]", "cc{0-0x9 0xb-0x1f 0x21-0x10ffff}", Regexp::NeverNL | Regexp::FoldCase }, { "[^ \f]", "cc{0-0x9 0xb 0xd-0x1f 0x21-0x10ffff}", TestZeroFlags }, { "[^ \f]", "cc{0-0x9 0xb 0xd-0x1f 0x21-0x10ffff}", Regexp::FoldCase }, { "[^ \f]", "cc{0-0x9 0xb 0xd-0x1f 0x21-0x10ffff}", Regexp::NeverNL }, { "[^ \f]", "cc{0-0x9 0xb 0xd-0x1f 0x21-0x10ffff}", Regexp::NeverNL | Regexp::FoldCase }, { "[^ \r]", "cc{0-0x9 0xb-0xc 0xe-0x1f 0x21-0x10ffff}", TestZeroFlags }, { "[^ \r]", "cc{0-0x9 0xb-0xc 0xe-0x1f 0x21-0x10ffff}", Regexp::FoldCase }, { "[^ \r]", "cc{0-0x9 0xb-0xc 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL }, { "[^ \r]", "cc{0-0x9 0xb-0xc 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL | Regexp::FoldCase }, { "[^ \v]", "cc{0-0x9 0xc-0x1f 0x21-0x10ffff}", TestZeroFlags }, { "[^ \v]", "cc{0-0x9 0xc-0x1f 0x21-0x10ffff}", Regexp::FoldCase }, { "[^ 
\v]", "cc{0-0x9 0xc-0x1f 0x21-0x10ffff}", Regexp::NeverNL }, { "[^ \v]", "cc{0-0x9 0xc-0x1f 0x21-0x10ffff}", Regexp::NeverNL | Regexp::FoldCase }, { "[^ \t]", "cc{0-0x8 0xb-0x1f 0x21-0x10ffff}", TestZeroFlags }, { "[^ \t]", "cc{0-0x8 0xb-0x1f 0x21-0x10ffff}", Regexp::FoldCase }, { "[^ \t]", "cc{0-0x8 0xb-0x1f 0x21-0x10ffff}", Regexp::NeverNL }, { "[^ \t]", "cc{0-0x8 0xb-0x1f 0x21-0x10ffff}", Regexp::NeverNL | Regexp::FoldCase }, { "[^ \r\f\v]", "cc{0-0x9 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL }, { "[^ \r\f\v]", "cc{0-0x9 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL | Regexp::FoldCase }, { "[^ \r\f\t\v]", "cc{0-0x8 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL }, { "[^ \r\f\t\v]", "cc{0-0x8 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL | Regexp::FoldCase }, { "[^ \r\n\f\t\v]", "cc{0-0x8 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL }, { "[^ \r\n\f\t\v]", "cc{0-0x8 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL | Regexp::FoldCase }, { "[^ \r\n\f\t]", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL }, { "[^ \r\n\f\t]", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::NeverNL | Regexp::FoldCase }, { "[^\t-\n\f-\r ]", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::PerlClasses }, { "[^\t-\n\f-\r ]", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::PerlClasses | Regexp::FoldCase }, { "[^\t-\n\f-\r ]", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::PerlClasses | Regexp::NeverNL }, { "[^\t-\n\f-\r ]", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::PerlClasses | Regexp::NeverNL | Regexp::FoldCase }, { "\\S", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::PerlClasses }, { "\\S", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::PerlClasses | Regexp::FoldCase }, { "\\S", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::PerlClasses | Regexp::NeverNL }, { "\\S", "cc{0-0x8 0xb 0xe-0x1f 0x21-0x10ffff}", Regexp::PerlClasses | Regexp::NeverNL | Regexp::FoldCase }, }; bool RegexpEqualTestingOnly(Regexp* a, Regexp* b) { return Regexp::Equal(a, b); } void TestParse(const Test* tests, int ntests, 
Regexp::ParseFlags flags, const string& title) { Regexp** re = new Regexp*[ntests]; for (int i = 0; i < ntests; i++) { RegexpStatus status; Regexp::ParseFlags f = flags; if (tests[i].flags != 0) { f = tests[i].flags & ~TestZeroFlags; } re[i] = Regexp::Parse(tests[i].regexp, f, &status); CHECK(re[i] != NULL) << " " << tests[i].regexp << " " << status.Text(); string s = re[i]->Dump(); EXPECT_EQ(string(tests[i].parse), s) << "Regexp: " << tests[i].regexp << "\nparse: " << string(tests[i].parse) << " s: " << s << " flag=" << f; } for (int i = 0; i < ntests; i++) { for (int j = 0; j < ntests; j++) { EXPECT_EQ(string(tests[i].parse) == string(tests[j].parse), RegexpEqualTestingOnly(re[i], re[j])) << "Regexp: " << tests[i].regexp << " " << tests[j].regexp; } } for (int i = 0; i < ntests; i++) re[i]->Decref(); delete[] re; } // Test that regexps parse to expected structures. TEST(TestParse, SimpleRegexps) { TestParse(tests, arraysize(tests), kTestFlags, "simple"); } Test foldcase_tests[] = { { "AbCdE", "strfold{abcde}" }, { "[Aa]", "litfold{a}" }, { "a", "litfold{a}" }, // 0x17F is an old English long s (looks like an f) and folds to s. // 0x212A is the Kelvin symbol and folds to k. { "A[F-g]", "cat{litfold{a}cc{0x41-0x7a 0x17f 0x212a}}" }, // [Aa][A-z...] { "[[:upper:]]", "cc{0x41-0x5a 0x61-0x7a 0x17f 0x212a}" }, { "[[:lower:]]", "cc{0x41-0x5a 0x61-0x7a 0x17f 0x212a}" }, }; // Test that parsing with FoldCase works. TEST(TestParse, FoldCase) { TestParse(foldcase_tests, arraysize(foldcase_tests), Regexp::FoldCase, "foldcase"); } Test literal_tests[] = { { "(|)^$.[*+?]{5,10},\\", "str{(|)^$.[*+?]{5,10},\\}" }, }; // Test that parsing with Literal works. TEST(TestParse, Literal) { TestParse(literal_tests, arraysize(literal_tests), Regexp::Literal, "literal"); } Test matchnl_tests[] = { { ".", "dot{}" }, { "\n", "lit{\n}" }, { "[^a]", "cc{0-0x60 0x62-0x10ffff}" }, { "[a\\n]", "cc{0xa 0x61}" }, }; // Test that parsing with MatchNL works. 
// (Also tested above during simple cases.) TEST(TestParse, MatchNL) { TestParse(matchnl_tests, arraysize(matchnl_tests), Regexp::MatchNL, "with MatchNL"); } Test nomatchnl_tests[] = { { ".", "cc{0-0x9 0xb-0x10ffff}" }, { "\n", "lit{\n}" }, { "[^a]", "cc{0-0x9 0xb-0x60 0x62-0x10ffff}" }, { "[a\\n]", "cc{0xa 0x61}" }, }; // Test that parsing without MatchNL works. TEST(TestParse, NoMatchNL) { TestParse(nomatchnl_tests, arraysize(nomatchnl_tests), Regexp::NoParseFlags, "without MatchNL"); } Test prefix_tests[] = { { "abc|abd", "cat{str{ab}cc{0x63-0x64}}" }, { "a(?:b)c|abd", "cat{str{ab}cc{0x63-0x64}}" }, { "abc|abd|aef|bcx|bcy", "alt{cat{lit{a}alt{cat{lit{b}cc{0x63-0x64}}str{ef}}}" "cat{str{bc}cc{0x78-0x79}}}" }, { "abc|x|abd", "alt{str{abc}lit{x}str{abd}}" }, { "(?i)abc|ABD", "cat{strfold{ab}cc{0x43-0x44 0x63-0x64}}" }, { "[ab]c|[ab]d", "cat{cc{0x61-0x62}cc{0x63-0x64}}" }, { "(?:xx|yy)c|(?:xx|yy)d", "cat{alt{str{xx}str{yy}}cc{0x63-0x64}}" }, { "x{2}|x{2}[0-9]", "cat{rep{2,2 lit{x}}alt{emp{}cc{0x30-0x39}}}" }, { "x{2}y|x{2}[0-9]y", "cat{rep{2,2 lit{x}}alt{lit{y}cat{cc{0x30-0x39}lit{y}}}}" }, { "n|r|rs", "alt{lit{n}cat{lit{r}alt{emp{}lit{s}}}}" }, { "n|rs|r", "alt{lit{n}cat{lit{r}alt{lit{s}emp{}}}}" }, { "r|rs|n", "alt{cat{lit{r}alt{emp{}lit{s}}}lit{n}}" }, { "rs|r|n", "alt{cat{lit{r}alt{lit{s}emp{}}}lit{n}}" }, }; // Test that prefix factoring works. 
TEST(TestParse, Prefix) { TestParse(prefix_tests, arraysize(prefix_tests), Regexp::PerlX, "prefix"); } Test nested_tests[] = { { "((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}))", "cap{cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 lit{x}}}}}}}}}}}}}}}}}}}}" }, { "((((((((((x{1}){2}){2}){2}){2}){2}){2}){2}){2}){2})", "cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{1,1 lit{x}}}}}}}}}}}}}}}}}}}}}" }, { "((((((((((x{0}){2}){2}){2}){2}){2}){2}){2}){2}){2})", "cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 cap{rep{0,0 lit{x}}}}}}}}}}}}}}}}}}}}}" }, { "((((((x{2}){2}){2}){5}){5}){5})", "cap{rep{5,5 cap{rep{5,5 cap{rep{5,5 cap{rep{2,2 cap{rep{2,2 cap{rep{2,2 lit{x}}}}}}}}}}}}}" }, }; // Test that nested repetition works. TEST(TestParse, Nested) { TestParse(nested_tests, arraysize(nested_tests), Regexp::PerlX, "nested"); } // Invalid regular expressions const char* badtests[] = { "(", ")", "(a", "(a|b|", "(a|b", "[a-z", "([a-z)", "x{1001}", "\xff", // Invalid UTF-8 "[\xff]", "[\\\xff]", "\\\xff", "(?P<name>a", "(?P<name>", "(?P<name", "(?P<x y>a)", "(?P<>a)", "[a-Z]", "(?i)[a-Z]", "a{100000}", "a{100000,}", "((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}){2})", "(((x{7}){11}){13})", }; // Valid in Perl, bad in POSIX const char* only_perl[] = { "[a-b-c]", "\\Qabc\\E", "\\Q*+?{[\\E", "\\Q\\\\E", "\\Q\\\\\\E", "\\Q\\\\\\\\E", "\\Q\\\\\\\\\\E", "(?:a)", "(?P<name>a)", }; // Valid in POSIX, bad in Perl. const char* only_posix[] = { "a++", "a**", "a?*", "a+*", "a{1}*", }; // Test that parser rejects bad regexps. 
TEST(TestParse, InvalidRegexps) { for (int i = 0; i < arraysize(badtests); i++) { CHECK(Regexp::Parse(badtests[i], Regexp::PerlX, NULL) == NULL) << " " << badtests[i]; CHECK(Regexp::Parse(badtests[i], Regexp::NoParseFlags, NULL) == NULL) << " " << badtests[i]; } for (int i = 0; i < arraysize(only_posix); i++) { CHECK(Regexp::Parse(only_posix[i], Regexp::PerlX, NULL) == NULL) << " " << only_posix[i]; Regexp* re = Regexp::Parse(only_posix[i], Regexp::NoParseFlags, NULL); CHECK(re) << " " << only_posix[i]; re->Decref(); } for (int i = 0; i < arraysize(only_perl); i++) { CHECK(Regexp::Parse(only_perl[i], Regexp::NoParseFlags, NULL) == NULL) << " " << only_perl[i]; Regexp* re = Regexp::Parse(only_perl[i], Regexp::PerlX, NULL); CHECK(re) << " " << only_perl[i]; re->Decref(); } } // Test that ToString produces original regexp or equivalent one. TEST(TestToString, EquivalentParse) { for (int i = 0; i < arraysize(tests); i++) { RegexpStatus status; Regexp::ParseFlags f = kTestFlags; if (tests[i].flags != 0) { f = tests[i].flags & ~TestZeroFlags; } Regexp* re = Regexp::Parse(tests[i].regexp, f, &status); CHECK(re != NULL) << " " << tests[i].regexp << " " << status.Text(); string s = re->Dump(); EXPECT_EQ(string(tests[i].parse), s) << " " << tests[i].regexp << " " << string(tests[i].parse) << " " << s; string t = re->ToString(); if (t != tests[i].regexp) { // If ToString didn't return the original regexp, // it must have found one with fewer parens. // Unfortunately we can't check the length here, because // ToString produces "\\{" for a literal brace, // but "{" is a shorter equivalent. // CHECK_LT(t.size(), strlen(tests[i].regexp)) // << " t=" << t << " regexp=" << tests[i].regexp; // Test that if we parse the new regexp we get the same structure. 
Regexp* nre = Regexp::Parse(t, Regexp::MatchNL | Regexp::PerlX, &status); CHECK(nre != NULL) << " reparse " << t << " " << status.Text(); string ss = nre->Dump(); string tt = nre->ToString(); if (s != ss || t != tt) LOG(INFO) << "ToString(" << tests[i].regexp << ") = " << t; EXPECT_EQ(s, ss); EXPECT_EQ(t, tt); nre->Decref(); } re->Decref(); } } // Test that capture error args are correct. TEST(NamedCaptures, ErrorArgs) { RegexpStatus status; Regexp* re; re = Regexp::Parse("test(?P<name", Regexp::LikePerl, &status); EXPECT_TRUE(re == NULL); EXPECT_EQ(status.code(), kRegexpBadNamedCapture); EXPECT_EQ(status.error_arg(), "(?P<name"); re = Regexp::Parse("test(?P<space bar>z)", Regexp::LikePerl, &status); EXPECT_TRUE(re == NULL); EXPECT_EQ(status.code(), kRegexpBadNamedCapture); EXPECT_EQ(status.error_arg(), "(?P<space bar>"); } } // namespace re2
bsd-3-clause
pobsuwan/lmms
vendor/patchwork/utf8/src/Patchwork/Utf8/Compiler.php
11865
<?php /* * Copyright (C) 2013 Nicolas Grekas - [email protected] * * This library is free software; you can redistribute it and/or modify it * under the terms of the (at your option): * Apache License v2.0 (http://apache.org/licenses/LICENSE-2.0.txt), or * GNU General Public License v2.0 (http://gnu.org/licenses/gpl-2.0.txt). */ namespace Patchwork\Utf8; /** * Compiler is a use once class that implements the compilation of unicode * and charset data to a format suitable for other Utf8 classes. * * See http://unicode.org/Public/UNIDATA/ for unicode data * See http://unicode.org/Public/MAPPINGS/ for charset conversion maps * See http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WindowsBestFit/ for mappings * See http://unicode.org/repos/cldr/trunk/common/transforms/ for Latin-ASCII.xml */ class Compiler { public static function charsetMaps($out_dir, $map_dir = null) { isset($map_dir) or $map_dir = __DIR__.'/unicode/charset/'; $h = opendir($map_dir); while (false !== $f = readdir($h)) { if (false === strpos($f, '.') && is_file($map_dir.$f)) { $from = $to = array(); $data = file_get_contents($map_dir.$f); if ('gsm0338' === $f) { $rx = '/^#?0x([0-9A-F]+)[ \t]+0x([0-9A-F]+)/mi'; } else { $rx = '/^0x([0-9A-F]+)[ \t]+0x([0-9A-F]+)/mi'; } preg_match_all($rx, $data, $data, PREG_SET_ORDER); if ('nextstep' === $f) { $from = array_map('chr', range(0, 127)); $from = array_combine($from, $from); } elseif ('mazovia' === $f) { $from = array("\x9B" => self::chr(0x007A).self::chr(0x0142)); $to = array(self::chr(0x203A) => "\x9B"); } foreach ($data as $data) { $data = array_map('hexdec', $data); $data[2] = self::chr($data[2]); $data[1] = $data[1] > 255 ? 
chr($data[1] >> 8).chr($data[1] % 256) : chr($data[1]); if (isset($from[$data[1]])) { isset($to[$data[2]]) or $to[$data[2]] = $data[1]; } else { $from[$data[1]] = $data[2]; } } file_put_contents("{$out_dir}from.{$f}.ser", serialize($from)); if ($to) { $to += array_flip($from); file_put_contents("{$out_dir}to.{$f}.ser", serialize($to)); } } } closedir($h); } public static function translitMap($out_dir) { $map = array(); $h = fopen(self::getFile('UnicodeData.txt'), 'rt'); while (false !== $line = fgets($h)) { $m = array(); if (preg_match('/^([^;]*);[^;]*;[^;]*;[^;]*;[^;]*;<(circle|compat|font|fraction|narrow|small|square|wide)> ([^;]*);/', $line, $m)) { $m[1] = self::chr(hexdec($m[1])); $m[3] = explode(' ', $m[3]); $m[3] = array_map('hexdec', $m[3]); $m[3] = array_map(array(__CLASS__, 'chr'), $m[3]); $m[3] = implode('', $m[3]); switch ($m[2]) { case 'compat': if (' ' === $m[3][0]) { continue 2; } break; case 'circle': $m[3] = '('.$m[3].')'; break; case 'fraction': $m[3] = ' '.$m[3].' '; break; } $m = array($m[1], $m[3]); } elseif (preg_match('/^([^;]*);CJK COMPATIBILITY IDEOGRAPH-[^;]*;[^;]*;[^;]*;[^;]*;([^;]*);/', $line, $m)) { $m = array( self::chr(hexdec($m[1])), self::chr(hexdec($m[2])), ); } if (!$m) { continue; } $map[$m[0]] = $m[1]; } fclose($h); foreach (file(self::getFile('Latin-ASCII.xml')) as $line) { if (preg_match('/<tRule>(.) → (.*?) 
;/u', $line, $m)) { if ('\u' === $m[1][0]) { $m[1] = self::chr(hexdec(substr($m[1], 2))); } $m[2] = htmlspecialchars_decode($m[2]); switch ($m[2][0]) { case '\\': $m[2] = substr($m[2], 1); break; case "'": $m[2] = substr($m[2], 1, -1); break; } isset($map[$m[1]]) or $map[$m[1]] = $m[2]; } } file_put_contents($out_dir.'translit.ser', serialize($map)); } public static function bestFit($out_dir, $map_dir = null) { isset($map_dir) or $map_dir = __DIR__.'/unicode/charset/'; $dh = opendir($map_dir); while (false !== $f = readdir($dh)) { if (0 === strpos($f, 'bestfit') && preg_match('/^bestfit\d+\.txt$/D', $f)) { $from = array(); $to = array(); $lead = ''; $h = fopen($map_dir.$f, 'rb'); while (false !== $s = fgets($h)) { if (0 === strpos($s, 'WCTABLE')) { break; } if (0 === strpos($s, 'DBCSTABLE')) { $lead = substr(rtrim($s), -2); $lead = chr(hexdec($lead)); } elseif (preg_match("/^0x([0-9a-f]{2})\t0x([0-9a-f]{4})/", $s, $s)) { $s = array_map('hexdec', $s); $from[$lead.chr($s[1])] = self::chr($s[2]); } } while (false !== $s = fgets($h)) { if (0 === strpos($s, 'ENDCODEPAGE')) { break; } $s = explode("\t", rtrim($s)); if (isset($s[1])) { $s[0] = substr($s[0], 2); $s[1] = substr($s[1], 2); $s = array_map('hexdec', $s); $s[1] = $s[1] > 255 ? chr($s[1] >> 8).chr($s[1] % 256) : chr($s[1]); $to[self::chr($s[0])] = $s[1]; } } fclose($h); // file_put_contents($out_dir . 'from.' . 
substr($f, 0, -3) .'ser', serialize($from)); file_put_contents($out_dir.'to.'.substr($f, 0, -3).'ser', serialize($to)); } } closedir($dh); } // Write unicode data maps to disk public static function unicodeMaps($out_dir) { $upperCase = array(); $lowerCase = array(); $caseFolding = array(); $combiningClass = array(); $canonicalComposition = array(); $canonicalDecomposition = array(); $compatibilityDecomposition = array(); $exclusion = array(); $h = fopen(self::getFile('CompositionExclusions.txt'), 'rt'); while (false !== $m = fgets($h)) { if (preg_match('/^(?:# )?([0-9A-F]+) /', $m, $m)) { $exclusion[self::chr(hexdec($m[1]))] = 1; } } fclose($h); $h = fopen(self::getFile('UnicodeData.txt'), 'rt'); while (false !== $m = fgets($h)) { $m = explode(';', $m); $k = self::chr(hexdec($m[0])); $combClass = (int) $m[3]; $decomp = $m[5]; $m[12] && $m[12] != $m[0] and $upperCase[$k] = self::chr(hexdec($m[12])); $m[13] && $m[13] != $m[0] and $lowerCase[$k] = self::chr(hexdec($m[13])); $combClass and $combiningClass[$k] = $combClass; if ($decomp) { $canonic = '<' != $decomp[0]; $canonic or $decomp = preg_replace("'^<.*> '", '', $decomp); $decomp = explode(' ', $decomp); $exclude = count($decomp) == 1 || isset($exclusion[$k]); $decomp = array_map('hexdec', $decomp); $decomp = array_map(array(__CLASS__, 'chr'), $decomp); $decomp = implode('', $decomp); if ($canonic) { $canonicalDecomposition[$k] = $decomp; $exclude or $canonicalComposition[$decomp] = $k; } $compatibilityDecomposition[$k] = $decomp; } } fclose($h); do { $m = 0; foreach ($canonicalDecomposition as $k => $decomp) { $h = strtr($decomp, $canonicalDecomposition); if ($h != $decomp) { $canonicalDecomposition[$k] = $h; $m = 1; } } } while ($m); do { $m = 0; foreach ($compatibilityDecomposition as $k => $decomp) { $h = strtr($decomp, $compatibilityDecomposition); if ($h != $decomp) { $compatibilityDecomposition[$k] = $h; $m = 1; } } } while ($m); foreach ($compatibilityDecomposition as $k => $decomp) { if 
(isset($canonicalDecomposition[$k]) && $canonicalDecomposition[$k] == $decomp) { unset($compatibilityDecomposition[$k]); } } $h = fopen(self::getFile('CaseFolding.txt'), 'rt'); while (false !== $m = fgets($h)) { if (preg_match('/^([0-9A-F]+); ([CFST]); ([0-9A-F]+(?: [0-9A-F]+)*)/', $m, $m)) { $k = self::chr(hexdec($m[1])); $decomp = explode(' ', $m[3]); $decomp = array_map('hexdec', $decomp); $decomp = array_map(array(__CLASS__, 'chr'), $decomp); $decomp = implode('', $decomp); @($lowerCase[$k] != $decomp and $caseFolding[$m[2]][$k] = $decomp); } } fclose($h); // Only full case folding is worth serializing $caseFolding = array( array_keys($caseFolding['F']), array_values($caseFolding['F']), ); $upperCase = serialize($upperCase); $lowerCase = serialize($lowerCase); $caseFolding = serialize($caseFolding); $combiningClass = serialize($combiningClass); $canonicalComposition = serialize($canonicalComposition); $canonicalDecomposition = serialize($canonicalDecomposition); $compatibilityDecomposition = serialize($compatibilityDecomposition); file_put_contents($out_dir.'upperCase.ser', $upperCase); file_put_contents($out_dir.'lowerCase.ser', $lowerCase); file_put_contents($out_dir.'caseFolding_full.ser', $caseFolding); file_put_contents($out_dir.'combiningClass.ser', $combiningClass); file_put_contents($out_dir.'canonicalComposition.ser', $canonicalComposition); file_put_contents($out_dir.'canonicalDecomposition.ser', $canonicalDecomposition); file_put_contents($out_dir.'compatibilityDecomposition.ser', $compatibilityDecomposition); } protected static function chr($c) { $c %= 0x200000; return $c < 0x80 ? chr($c) : ( $c < 0x800 ? chr(0xC0 | $c >> 6).chr(0x80 | $c & 0x3F) : ( $c < 0x10000 ? chr(0xE0 | $c >> 12).chr(0x80 | $c >> 6 & 0x3F).chr(0x80 | $c & 0x3F) : ( chr(0xF0 | $c >> 18).chr(0x80 | $c >> 12 & 0x3F).chr(0x80 | $c >> 6 & 0x3F).chr(0x80 | $c & 0x3F) ))); } protected static function getFile($file) { return __DIR__.'/unicode/data/'.$file; } }
mit
ECHOInternational/refinerycms-patch
resources/app/helpers/refinery/admin/resources_helper.rb
76
module Refinery module Admin module ResourcesHelper end end end
mit
stepancar/DefinitelyTyped
codemirror/codemirror.d.ts
61207
// Type definitions for CodeMirror // Project: https://github.com/marijnh/CodeMirror // Definitions by: mihailik <https://github.com/mihailik> // Definitions: https://github.com/borisyankov/DefinitelyTyped declare function CodeMirror(host: HTMLElement, options?: CodeMirror.EditorConfiguration): CodeMirror.Editor; declare function CodeMirror(callback: (host: HTMLElement) => void , options?: CodeMirror.EditorConfiguration): CodeMirror.Editor; declare module CodeMirror { export var Pass: any; function fromTextArea(host: HTMLTextAreaElement, options?: EditorConfiguration): CodeMirror.EditorFromTextArea; var version: string; /** If you want to define extra methods in terms of the CodeMirror API, it is possible to use defineExtension. This will cause the given value(usually a method) to be added to all CodeMirror instances created from then on. */ function defineExtension(name: string, value: any): void; /** Like defineExtension, but the method will be added to the interface for Doc objects instead. */ function defineDocExtension(name: string, value: any): void; /** Similarly, defineOption can be used to define new options for CodeMirror. The updateFunc will be called with the editor instance and the new value when an editor is initialized, and whenever the option is modified through setOption. */ function defineOption(name: string, default_: any, updateFunc: Function): void; /** If your extention just needs to run some code whenever a CodeMirror instance is initialized, use CodeMirror.defineInitHook. Give it a function as its only argument, and from then on, that function will be called (with the instance as argument) whenever a new CodeMirror instance is initialized. */ function defineInitHook(func: Function): void; function on(element: any, eventName: string, handler: Function): void; function off(element: any, eventName: string, handler: Function): void; /** Fired whenever a change occurs to the document. 
changeObj has a similar type as the object passed to the editor's "change" event, but it never has a next property, because document change events are not batched (whereas editor change events are). */ function on(doc: Doc, eventName: 'change', handler: (instance: Doc, change: EditorChange) => void ): void; function off(doc: Doc, eventName: 'change', handler: (instance: Doc, change: EditorChange) => void ): void; /** See the description of the same event on editor instances. */ function on(doc: Doc, eventName: 'beforeChange', handler: (instance: Doc, change: EditorChangeCancellable) => void ): void; function off(doc: Doc, eventName: 'beforeChange', handler: (instance: Doc, change: EditorChangeCancellable) => void ): void; /** Fired whenever the cursor or selection in this document changes. */ function on(doc: Doc, eventName: 'cursorActivity', handler: (instance: CodeMirror.Editor) => void ): void; function off(doc: Doc, eventName: 'cursorActivity', handler: (instance: CodeMirror.Editor) => void ): void; /** Equivalent to the event by the same name as fired on editor instances. */ function on(doc: Doc, eventName: 'beforeSelectionChange', handler: (instance: CodeMirror.Editor, selection: { head: Position; anchor: Position; }) => void ): void; function off(doc: Doc, eventName: 'beforeSelectionChange', handler: (instance: CodeMirror.Editor, selection: { head: Position; anchor: Position; }) => void ): void; /** Will be fired when the line object is deleted. A line object is associated with the start of the line. Mostly useful when you need to find out when your gutter markers on a given line are removed. */ function on(line: LineHandle, eventName: 'delete', handler: () => void ): void; function off(line: LineHandle, eventName: 'delete', handler: () => void ): void; /** Fires when the line's text content is changed in any way (but the line is not deleted outright). The change object is similar to the one passed to change event on the editor object. 
*/ function on(line: LineHandle, eventName: 'change', handler: (line: LineHandle, change: EditorChange) => void ): void; function off(line: LineHandle, eventName: 'change', handler: (line: LineHandle, change: EditorChange) => void ): void; /** Fired when the cursor enters the marked range. From this event handler, the editor state may be inspected but not modified, with the exception that the range on which the event fires may be cleared. */ function on(marker: TextMarker, eventName: 'beforeCursorEnter', handler: () => void ): void; function off(marker: TextMarker, eventName: 'beforeCursorEnter', handler: () => void ): void; /** Fired when the range is cleared, either through cursor movement in combination with clearOnEnter or through a call to its clear() method. Will only be fired once per handle. Note that deleting the range through text editing does not fire this event, because an undo action might bring the range back into existence. */ function on(marker: TextMarker, eventName: 'clear', handler: () => void ): void; function off(marker: TextMarker, eventName: 'clear', handler: () => void ): void; /** Fired when the last part of the marker is removed from the document by editing operations. */ function on(marker: TextMarker, eventName: 'hide', handler: () => void ): void; function off(marker: TextMarker, eventName: 'hide', handler: () => void ): void; /** Fired when, after the marker was removed by editing, a undo operation brought the marker back. */ function on(marker: TextMarker, eventName: 'unhide', handler: () => void ): void; function off(marker: TextMarker, eventName: 'unhide', handler: () => void ): void; /** Fired whenever the editor re-adds the widget to the DOM. This will happen once right after the widget is added (if it is scrolled into view), and then again whenever it is scrolled out of view and back in again, or when changes to the editor options or the line the widget is on require the widget to be redrawn. 
*/ function on(line: LineWidget, eventName: 'redraw', handler: () => void ): void; function off(line: LineWidget, eventName: 'redraw', handler: () => void ): void; /** Various CodeMirror-related objects emit events, which allow client code to react to various situations. Handlers for such events can be registered with the on and off methods on the objects that the event fires on. To fire your own events, use CodeMirror.signal(target, name, args...), where target is a non-DOM-node object. */ function signal(target: any, name: string, ...args: any[]): void; interface Editor { /** Tells you whether the editor currently has focus. */ hasFocus(): boolean; /** Used to find the target position for horizontal cursor motion.start is a { line , ch } object, amount an integer(may be negative), and unit one of the string "char", "column", or "word". Will return a position that is produced by moving amount times the distance specified by unit. When visually is true , motion in right - to - left text will be visual rather than logical. When the motion was clipped by hitting the end or start of the document, the returned value will have a hitSide property set to true. */ findPosH(start: CodeMirror.Position, amount: number, unit: string, visually: boolean): { line: number; ch: number; hitSide?: boolean; }; /** Similar to findPosH , but used for vertical motion.unit may be "line" or "page". The other arguments and the returned value have the same interpretation as they have in findPosH. */ findPosV(start: CodeMirror.Position, amount: number, unit: string): { line: number; ch: number; hitSide?: boolean; }; /** Change the configuration of the editor. option should the name of an option, and value should be a valid value for that option. */ setOption(option: string, value: any): void; /** Retrieves the current value of the given option for this editor instance. */ getOption(option: string): any; /** Attach an additional keymap to the editor. 
This is mostly useful for add - ons that need to register some key handlers without trampling on the extraKeys option. Maps added in this way have a higher precedence than the extraKeys and keyMap options, and between them, the maps added earlier have a lower precedence than those added later, unless the bottom argument was passed, in which case they end up below other keymaps added with this method. */ addKeyMap(map: any, bottom?: boolean): void; /** Disable a keymap added with addKeyMap.Either pass in the keymap object itself , or a string, which will be compared against the name property of the active keymaps. */ removeKeyMap(map: any): void; /** Enable a highlighting overlay.This is a stateless mini - mode that can be used to add extra highlighting. For example, the search add - on uses it to highlight the term that's currently being searched. mode can be a mode spec or a mode object (an object with a token method). The options parameter is optional. If given, it should be an object. Currently, only the opaque option is recognized. This defaults to off, but can be given to allow the overlay styling, when not null, to override the styling of the base mode entirely, instead of the two being applied together. */ addOverlay(mode: any, options?: any): void; /** Pass this the exact argument passed for the mode parameter to addOverlay to remove an overlay again. */ removeOverlay(mode: any): void; /** Retrieve the currently active document from an editor. */ getDoc(): CodeMirror.Doc; /** Attach a new document to the editor. Returns the old document, which is now no longer associated with an editor. */ swapDoc(doc: CodeMirror.Doc): CodeMirror.Doc; /** Sets the gutter marker for the given gutter (identified by its CSS class, see the gutters option) to the given value. Value can be either null, to clear the marker, or a DOM element, to set it. The DOM element will be shown in the specified gutter next to the specified line. 
*/ setGutterMarker(line: any, gutterID: string, value: HTMLElement): CodeMirror.LineHandle; /** Remove all gutter markers in the gutter with the given ID. */ clearGutter(gutterID: string): void; /** Set a CSS class name for the given line.line can be a number or a line handle. where determines to which element this class should be applied, can can be one of "text" (the text element, which lies in front of the selection), "background"(a background element that will be behind the selection), or "wrap" (the wrapper node that wraps all of the line's elements, including gutter elements). class should be the name of the class to apply. */ addLineClass(line: any, where: string, _class_: string): CodeMirror.LineHandle; /** Remove a CSS class from a line.line can be a line handle or number. where should be one of "text", "background", or "wrap"(see addLineClass). class can be left off to remove all classes for the specified node, or be a string to remove only a specific class. */ removeLineClass(line: any, where: string, class_: string): CodeMirror.LineHandle; /** Returns the line number, text content, and marker status of the given line, which can be either a number or a line handle. */ lineInfo(line: any): { line: any; handle: any; text: string; /** Object mapping gutter IDs to marker elements. */ gutterMarks: any; textClass: string; bgClass: string; wrapClass: string; /** Array of line widgets attached to this line. */ widgets: any; }; /** Puts node, which should be an absolutely positioned DOM node, into the editor, positioned right below the given { line , ch } position. When scrollIntoView is true, the editor will ensure that the entire node is visible (if possible). To remove the widget again, simply use DOM methods (move it somewhere else, or call removeChild on its parent). 
*/ addWidget(pos: CodeMirror.Position, node: HTMLElement, scrollIntoView: boolean): void; /** Adds a line widget, an element shown below a line, spanning the whole of the editor's width, and moving the lines below it downwards. line should be either an integer or a line handle, and node should be a DOM node, which will be displayed below the given line. options, when given, should be an object that configures the behavior of the widget. Note that the widget node will become a descendant of nodes with CodeMirror-specific CSS classes, and those classes might in some cases affect it. */ addLineWidget(line: any, node: HTMLElement, options?: { /** Whether the widget should cover the gutter. */ coverGutter: boolean; /** Whether the widget should stay fixed in the face of horizontal scrolling. */ noHScroll: boolean; /** Causes the widget to be placed above instead of below the text of the line. */ above: boolean; /** When true, will cause the widget to be rendered even if the line it is associated with is hidden. */ showIfHidden: boolean; }): CodeMirror.LineWidget; /** Programatically set the size of the editor (overriding the applicable CSS rules). width and height height can be either numbers(interpreted as pixels) or CSS units ("100%", for example). You can pass null for either of them to indicate that that dimension should not be changed. */ setSize(width: any, height: any): void; /** Scroll the editor to a given(pixel) position.Both arguments may be left as null or undefined to have no effect. */ scrollTo(x: number, y: number): void; /** Get an { left , top , width , height , clientWidth , clientHeight } object that represents the current scroll position, the size of the scrollable area, and the size of the visible area(minus scrollbars). */ getScrollInfo(): { left: any; top: any; width: any; height: any; clientWidth: any; clientHeight: any; } /** Scrolls the given element into view. 
pos is a { line , ch } position, referring to a given character, null, to refer to the cursor. The margin parameter is optional. When given, it indicates the amount of pixels around the given area that should be made visible as well. */ scrollIntoView(pos: CodeMirror.Position, margin?: number): void; /** Scrolls the given element into view. pos is a { left , top , right , bottom } object, in editor-local coordinates. The margin parameter is optional. When given, it indicates the amount of pixels around the given area that should be made visible as well. */ scrollIntoView(pos: { left: number; top: number; right: number; bottom: number; }, margin: number): void; /** Returns an { left , top , bottom } object containing the coordinates of the cursor position. If mode is "local" , they will be relative to the top-left corner of the editable document. If it is "page" or not given, they are relative to the top-left corner of the page. where is a boolean indicating whether you want the start(true) or the end(false) of the selection. */ cursorCoords(where: boolean, mode: string): { left: number; top: number; bottom: number; }; /** Returns an { left , top , bottom } object containing the coordinates of the cursor position. If mode is "local" , they will be relative to the top-left corner of the editable document. If it is "page" or not given, they are relative to the top-left corner of the page. where specifies the precise position at which you want to measure. */ cursorCoords(where: CodeMirror.Position, mode: string): { left: number; top: number; bottom: number; }; /** Returns the position and dimensions of an arbitrary character.pos should be a { line , ch } object. This differs from cursorCoords in that it'll give the size of the whole character, rather than just the position that the cursor would have when it would sit at that position. 
*/ charCoords(pos: CodeMirror.Position, mode: string): { left: number; right: number; top: number; bottom: number; }; /** Given an { left , top } object , returns the { line , ch } position that corresponds to it. The optional mode parameter determines relative to what the coordinates are interpreted. It may be "window" , "page"(the default) , or "local". */ coordsChar(object: { left: number; top: number; }, mode?: string): CodeMirror.Position; /** Returns the line height of the default font for the editor. */ defaultTextHeight(): number; /** Returns the pixel width of an 'x' in the default font for the editor. (Note that for non - monospace fonts , this is mostly useless, and even for monospace fonts, non - ascii characters might have a different width). */ defaultCharWidth(): number; /** Returns a { from , to } object indicating the start (inclusive) and end (exclusive) of the currently rendered part of the document. In big documents, when most content is scrolled out of view, CodeMirror will only render the visible part, and a margin around it. See also the viewportChange event. */ getViewport(): { from: number; to: number }; /** If your code does something to change the size of the editor element (window resizes are already listened for), or unhides it, you should probably follow up by calling this method to ensure CodeMirror is still looking as intended. */ refresh(): void; /** Retrieves information about the token the current mode found before the given position (a {line, ch} object). */ getTokenAt(pos: CodeMirror.Position): { /** The character(on the given line) at which the token starts. */ start: number; /** The character at which the token ends. */ end: number; /** The token's string. */ string: string; /** The token type the mode assigned to the token, such as "keyword" or "comment" (may also be null). */ type: string; /** The mode's state at the end of this token. 
*/ state: any; }; /** Returns the mode's parser state, if any, at the end of the given line number. If no line number is given, the state at the end of the document is returned. This can be useful for storing parsing errors in the state, or getting other kinds of contextual information for a line. */ getStateAfter(line?: number): any; /** CodeMirror internally buffers changes and only updates its DOM structure after it has finished performing some operation. If you need to perform a lot of operations on a CodeMirror instance, you can call this method with a function argument. It will call the function, buffering up all changes, and only doing the expensive update after the function returns. This can be a lot faster. The return value from this method will be the return value of your function. */ operation<T>(fn: ()=> T): T; /** Adjust the indentation of the given line. The second argument (which defaults to "smart") may be one of: "prev" Base indentation on the indentation of the previous line. "smart" Use the mode's smart indentation if available, behave like "prev" otherwise. "add" Increase the indentation of the line by one indent unit. "subtract" Reduce the indentation of the line. */ indentLine(line: number, dir?: string): void; /** Give the editor focus. */ focus(): void; /** Returns the hidden textarea used to read input. */ getInputField(): HTMLTextAreaElement; /** Returns the DOM node that represents the editor, and controls its size. Remove this from your tree to delete an editor instance. */ getWrapperElement(): HTMLElement; /** Returns the DOM node that is responsible for the scrolling of the editor. */ getScrollerElement(): HTMLElement; /** Fetches the DOM node that contains the editor gutters. */ getGutterElement(): HTMLElement; /** Events are registered with the on method (and removed with the off method). These are the events that fire on the instance object. The name of the event is followed by the arguments that will be passed to the handler. 
The instance argument always refers to the editor instance. */ on(eventName: string, handler: (instance: CodeMirror.Editor) => void ): void; off(eventName: string, handler: (instance: CodeMirror.Editor) => void ): void; /** Fires every time the content of the editor is changed. */ on(eventName: 'change', handler: (instance: CodeMirror.Editor, change: CodeMirror.EditorChangeLinkedList) => void ): void; off(eventName: 'change', handler: (instance: CodeMirror.Editor, change: CodeMirror.EditorChangeLinkedList) => void ): void; /** This event is fired before a change is applied, and its handler may choose to modify or cancel the change. The changeObj never has a next property, since this is fired for each individual change, and not batched per operation. Note: you may not do anything from a "beforeChange" handler that would cause changes to the document or its visualization. Doing so will, since this handler is called directly from the bowels of the CodeMirror implementation, probably cause the editor to become corrupted. */ on(eventName: 'beforeChange', handler: (instance: CodeMirror.Editor, change: CodeMirror.EditorChangeCancellable) => void ): void; off(eventName: 'beforeChange', handler: (instance: CodeMirror.Editor, change: CodeMirror.EditorChangeCancellable) => void ): void; /** Will be fired when the cursor or selection moves, or any change is made to the editor content. */ on(eventName: 'cursorActivity', handler: (instance: CodeMirror.Editor) => void ): void; off(eventName: 'cursorActivity', handler: (instance: CodeMirror.Editor) => void ): void; /** This event is fired before the selection is moved. Its handler may modify the resulting selection head and anchor. Handlers for this event have the same restriction as "beforeChange" handlers � they should not do anything to directly update the state of the editor. 
*/ on(eventName: 'beforeSelectionChange', handler: (instance: CodeMirror.Editor, selection: { head: CodeMirror.Position; anchor: CodeMirror.Position; }) => void ): void; off(eventName: 'beforeSelectionChange', handler: (instance: CodeMirror.Editor, selection: { head: CodeMirror.Position; anchor: CodeMirror.Position; }) => void ): void; /** Fires whenever the view port of the editor changes (due to scrolling, editing, or any other factor). The from and to arguments give the new start and end of the viewport. */ on(eventName: 'viewportChange', handler: (instance: CodeMirror.Editor, from: number, to: number) => void ): void; off(eventName: 'viewportChange', handler: (instance: CodeMirror.Editor, from: number, to: number) => void ): void; /** Fires when the editor gutter (the line-number area) is clicked. Will pass the editor instance as first argument, the (zero-based) number of the line that was clicked as second argument, the CSS class of the gutter that was clicked as third argument, and the raw mousedown event object as fourth argument. */ on(eventName: 'gutterClick', handler: (instance: CodeMirror.Editor, line: number, gutter: string, clickEvent: Event) => void ): void; off(eventName: 'gutterClick', handler: (instance: CodeMirror.Editor, line: number, gutter: string, clickEvent: Event) => void ): void; /** Fires whenever the editor is focused. */ on(eventName: 'focus', handler: (instance: CodeMirror.Editor) => void ): void; off(eventName: 'focus', handler: (instance: CodeMirror.Editor) => void ): void; /** Fires whenever the editor is unfocused. */ on(eventName: 'blur', handler: (instance: CodeMirror.Editor) => void ): void; off(eventName: 'blur', handler: (instance: CodeMirror.Editor) => void ): void; /** Fires when the editor is scrolled. */ on(eventName: 'scroll', handler: (instance: CodeMirror.Editor) => void ): void; off(eventName: 'scroll', handler: (instance: CodeMirror.Editor) => void ): void; /** Will be fired whenever CodeMirror updates its DOM display. 
*/ on(eventName: 'update', handler: (instance: CodeMirror.Editor) => void ): void; off(eventName: 'update', handler: (instance: CodeMirror.Editor) => void ): void; /** Fired whenever a line is (re-)rendered to the DOM. Fired right after the DOM element is built, before it is added to the document. The handler may mess with the style of the resulting element, or add event handlers, but should not try to change the state of the editor. */ on(eventName: 'renderLine', handler: (instance: CodeMirror.Editor, line: number, element: HTMLElement) => void ): void; off(eventName: 'renderLine', handler: (instance: CodeMirror.Editor, line: number, element: HTMLElement) => void ): void; } interface EditorFromTextArea extends Editor { /** Copy the content of the editor into the textarea. */ save(): void; /** Remove the editor, and restore the original textarea (with the editor's current content). */ toTextArea(): void; /** Returns the textarea that the instance was based on. */ getTextArea(): HTMLTextAreaElement; } class Doc { constructor (text: string, mode?: any, firstLineNumber?: number); /** Get the current editor content. You can pass it an optional argument to specify the string to be used to separate lines (defaults to "\n"). */ getValue(seperator?: string): string; /** Set the editor content. */ setValue(content: string): void; /** Get the text between the given points in the editor, which should be {line, ch} objects. An optional third argument can be given to indicate the line separator string to use (defaults to "\n"). */ getRange(from: Position, to: CodeMirror.Position, seperator?: string): string; /** Replace the part of the document between from and to with the given string. from and to must be {line, ch} objects. to can be left off to simply insert the string at position from. */ replaceRange(replacement: string, from: CodeMirror.Position, to: CodeMirror.Position): void; /** Get the content of line n. */ getLine(n: number): string; /** Set the content of line n. 
*/ setLine(n: number, text: string): void; /** Remove the given line from the document. */ removeLine(n: number): void; /** Get the number of lines in the editor. */ lineCount(): number; /** Get the first line of the editor. This will usually be zero but for linked sub-views, or documents instantiated with a non-zero first line, it might return other values. */ firstLine(): number; /** Get the last line of the editor. This will usually be lineCount() - 1, but for linked sub-views, it might return other values. */ lastLine(): number; /** Fetches the line handle for the given line number. */ getLineHandle(num: number): CodeMirror.LineHandle; /** Given a line handle, returns the current position of that line (or null when it is no longer in the document). */ getLineNumber(handle: CodeMirror.LineHandle): number; /** Iterate over the whole document, and call f for each line, passing the line handle. This is a faster way to visit a range of line handlers than calling getLineHandle for each of them. Note that line handles have a text property containing the line's content (as a string). */ eachLine(f: (line: CodeMirror.LineHandle) => void ): void; /** Iterate over the range from start up to (not including) end, and call f for each line, passing the line handle. This is a faster way to visit a range of line handlers than calling getLineHandle for each of them. Note that line handles have a text property containing the line's content (as a string). */ eachLine(start: number, end: number, f: (line: CodeMirror.LineHandle) => void ): void; /** Set the editor content as 'clean', a flag that it will retain until it is edited, and which will be set again when such an edit is undone again. Useful to track whether the content needs to be saved. */ markClean(): void; /** Returns whether the document is currently clean (not modified since initialization or the last call to markClean). */ isClean(): boolean; /** Get the currently selected code. 
*/ getSelection(): string; /** Replace the selection with the given string. By default, the new selection will span the inserted text. The optional collapse argument can be used to change this � passing "start" or "end" will collapse the selection to the start or end of the inserted text. */ replaceSelection(replacement: string, collapse?: string): void; /** start is a an optional string indicating which end of the selection to return. It may be "start" , "end" , "head"(the side of the selection that moves when you press shift + arrow), or "anchor"(the fixed side of the selection).Omitting the argument is the same as passing "head".A { line , ch } object will be returned. */ getCursor(start?: string): CodeMirror.Position; /** Return true if any text is selected. */ somethingSelected(): boolean; /** Set the cursor position.You can either pass a single { line , ch } object , or the line and the character as two separate parameters. */ setCursor(pos: CodeMirror.Position): void; /** Set the selection range.anchor and head should be { line , ch } objects.head defaults to anchor when not given. */ setSelection(anchor: CodeMirror.Position, head: CodeMirror.Position): void; /** Similar to setSelection , but will, if shift is held or the extending flag is set, move the head of the selection while leaving the anchor at its current place. pos2 is optional , and can be passed to ensure a region (for example a word or paragraph) will end up selected (in addition to whatever lies between that region and the current anchor). */ extendSelection(from: CodeMirror.Position, to?: CodeMirror.Position): void; /** Sets or clears the 'extending' flag , which acts similar to the shift key, in that it will cause cursor movement and calls to extendSelection to leave the selection anchor in place. */ setExtending(value: boolean): void; /** Retrieve the editor associated with a document. May return null. */ getEditor(): CodeMirror.Editor; /** Create an identical copy of the given doc. 
When copyHistory is true , the history will also be copied.Can not be called directly on an editor. */ copy(copyHistory: boolean): CodeMirror.Doc; /** Create a new document that's linked to the target document. Linked documents will stay in sync (changes to one are also applied to the other) until unlinked. */ linkedDoc(options: { /** When turned on, the linked copy will share an undo history with the original. Thus, something done in one of the two can be undone in the other, and vice versa. */ sharedHist?: boolean; from?: number; /** Can be given to make the new document a subview of the original. Subviews only show a given range of lines. Note that line coordinates inside the subview will be consistent with those of the parent, so that for example a subview starting at line 10 will refer to its first line as line 10, not 0. */ to?: number; /** By default, the new document inherits the mode of the parent. This option can be set to a mode spec to give it a different mode. */ mode: any; }): CodeMirror.Doc; /** Break the link between two documents. After calling this , changes will no longer propagate between the documents, and, if they had a shared history, the history will become separate. */ unlinkDoc(doc: CodeMirror.Doc): void; /** Will call the given function for all documents linked to the target document. It will be passed two arguments, the linked document and a boolean indicating whether that document shares history with the target. */ iterLinkedDocs(fn: (doc: CodeMirror.Doc, sharedHist: boolean) => void ): void; /** Undo one edit (if any undo events are stored). */ undo(): void; /** Redo one undone edit. */ redo(): void; /** Returns an object with {undo, redo } properties , both of which hold integers , indicating the amount of stored undo and redo operations. */ historySize(): { undo: number; redo: number; }; /** Clears the editor's undo history. */ clearHistory(): void; /** Get a(JSON - serializeable) representation of the undo history. 
*/ getHistory(): any; /** Replace the editor's undo history with the one provided, which must be a value as returned by getHistory. Note that this will have entirely undefined results if the editor content isn't also the same as it was when getHistory was called. */ setHistory(history: any): void; /** Can be used to mark a range of text with a specific CSS class name. from and to should be { line , ch } objects. */ markText(from: CodeMirror.Position, to: CodeMirror.Position, options?: CodeMirror.TextMarkerOptions): TextMarker; /** Inserts a bookmark, a handle that follows the text around it as it is being edited, at the given position. A bookmark has two methods find() and clear(). The first returns the current position of the bookmark, if it is still in the document, and the second explicitly removes the bookmark. */ setBookmark(pos: CodeMirror.Position, options?: { /** Can be used to display a DOM node at the current location of the bookmark (analogous to the replacedWith option to markText). */ widget?: HTMLElement; /** By default, text typed when the cursor is on top of the bookmark will end up to the right of the bookmark. Set this option to true to make it go to the left instead. */ insertLeft?: boolean; }): CodeMirror.TextMarker; /** Returns an array of all the bookmarks and marked ranges found between the given positions. */ findMarks(from: CodeMirror.Position, to: CodeMirror.Position): TextMarker[]; /** Returns an array of all the bookmarks and marked ranges present at the given position. */ findMarksAt(pos: CodeMirror.Position): TextMarker[]; /** Returns an array containing all marked ranges in the document. */ getAllMarks(): CodeMirror.TextMarker[]; /** Gets the mode object for the editor. Note that this is distinct from getOption("mode"), which gives you the mode specification, rather than the resolved, instantiated mode object. 
*/ getMode(): any; /** Calculates and returns a { line , ch } object for a zero-based index whose value is relative to the start of the editor's text. If the index is out of range of the text then the returned object is clipped to start or end of the text respectively. */ posFromIndex(index: number): CodeMirror.Position; /** The reverse of posFromIndex. */ indexFromPos(object: CodeMirror.Position): number; } interface LineHandle { text: string; } interface TextMarker { /** Remove the mark. */ clear(): void; /** Returns a {from, to} object (both holding document positions), indicating the current position of the marked range, or undefined if the marker is no longer in the document. */ find(): CodeMirror.Position; /** Returns an object representing the options for the marker. If copyWidget is given true, it will clone the value of the replacedWith option, if any. */ getOptions(copyWidget: boolean): CodeMirror.TextMarkerOptions; } interface LineWidget { /** Removes the widget. */ clear(): void; /** Call this if you made some change to the widget's DOM node that might affect its height. It'll force CodeMirror to update the height of the line that contains the widget. */ changed(): void; } interface EditorChange { /** Position (in the pre-change coordinate system) where the change started. */ from: CodeMirror.Position; /** Position (in the pre-change coordinate system) where the change ended. */ to: CodeMirror.Position; /** Array of strings representing the text that replaced the changed range (split by line). */ text: string[]; /** Text that used to be between from and to, which is overwritten by this change. */ removed: string; /** String representing the origin of the change event and wether it can be merged with history */ origin: string; } interface EditorChangeLinkedList extends CodeMirror.EditorChange { /** Points to another change object (which may point to another, etc). 
*/ next?: CodeMirror.EditorChangeLinkedList; } interface EditorChangeCancellable extends CodeMirror.EditorChange { /** may be used to modify the change. All three arguments to update are optional, and can be left off to leave the existing value for that field intact. */ update(from?: CodeMirror.Position, to?: CodeMirror.Position, text?: string): void; cancel(): void; } interface Position { ch: number; line: number; } interface EditorConfiguration { /** string| The starting value of the editor. Can be a string, or a document object. */ value?: any; /** string|object. The mode to use. When not given, this will default to the first mode that was loaded. It may be a string, which either simply names the mode or is a MIME type associated with the mode. Alternatively, it may be an object containing configuration options for the mode, with a name property that names the mode (for example {name: "javascript", json: true}). */ mode?: any; /** The theme to style the editor with. You must make sure the CSS file defining the corresponding .cm-s-[name] styles is loaded. The default is "default". */ theme?: string; /** How many spaces a block (whatever that means in the edited language) should be indented. The default is 2. */ indentUnit?: number; /** Whether to use the context-sensitive indentation that the mode provides (or just indent the same as the line before). Defaults to true. */ smartIndent?: boolean; /** The width of a tab character. Defaults to 4. */ tabSize?: number; /** Whether, when indenting, the first N*tabSize spaces should be replaced by N tabs. Default is false. */ indentWithTabs?: boolean; /** Configures whether the editor should re-indent the current line when a character is typed that might change its proper indentation (only works if the mode supports indentation). Default is true. 
*/ electricChars?: boolean; /** Determines whether horizontal cursor movement through right-to-left (Arabic, Hebrew) text is visual (pressing the left arrow moves the cursor left) or logical (pressing the left arrow moves to the next lower index in the string, which is visually right in right-to-left text). The default is false on Windows, and true on other platforms. */ rtlMoveVisually?: boolean; /** Configures the keymap to use. The default is "default", which is the only keymap defined in codemirror.js itself. Extra keymaps are found in the keymap directory. See the section on keymaps for more information. */ keyMap?: string; /** Can be used to specify extra keybindings for the editor, alongside the ones defined by keyMap. Should be either null, or a valid keymap value. */ extraKeys?: any; /** Whether CodeMirror should scroll or wrap for long lines. Defaults to false (scroll). */ lineWrapping?: boolean; /** Whether to show line numbers to the left of the editor. */ lineNumbers?: boolean; /** At which number to start counting lines. Default is 1. */ firstLineNumber?: number; /** A function used to format line numbers. The function is passed the line number, and should return a string that will be shown in the gutter. */ lineNumberFormatter?: (line: number) => string; /** Can be used to add extra gutters (beyond or instead of the line number gutter). Should be an array of CSS class names, each of which defines a width (and optionally a background), and which will be used to draw the background of the gutters. May include the CodeMirror-linenumbers class, in order to explicitly set the position of the line number gutter (it will default to be to the right of all other gutters). These class names are the keys passed to setGutterMarker. */ gutters?: string[]; /** Determines whether the gutter scrolls along with the content horizontally (false) or whether it stays fixed during horizontal scrolling (true, the default). */ fixedGutter?: boolean; /** boolean|string. 
This disables editing of the editor content by the user. If the special value "nocursor" is given (instead of simply true), focusing of the editor is also disallowed. */ readOnly?: any; /**Whether the cursor should be drawn when a selection is active. Defaults to false. */ showCursorWhenSelecting?: boolean; /** The maximum number of undo levels that the editor stores. Defaults to 40. */ undoDepth?: number; /** The period of inactivity (in milliseconds) that will cause a new history event to be started when typing or deleting. Defaults to 500. */ historyEventDelay?: number; /** The tab index to assign to the editor. If not given, no tab index will be assigned. */ tabindex?: number; /** Can be used to make CodeMirror focus itself on initialization. Defaults to off. When fromTextArea is used, and no explicit value is given for this option, it will be set to true when either the source textarea is focused, or it has an autofocus attribute and no other element is focused. */ autofocus?: boolean; /** Controls whether drag-and - drop is enabled. On by default. */ dragDrop?: boolean; /** When given , this will be called when the editor is handling a dragenter , dragover , or drop event. It will be passed the editor instance and the event object as arguments. The callback can choose to handle the event itself , in which case it should return true to indicate that CodeMirror should not do anything further. */ onDragEvent?: (instance: CodeMirror.Editor, event: Event) => boolean; /** This provides a rather low - level hook into CodeMirror's key handling. If provided, this function will be called on every keydown, keyup, and keypress event that CodeMirror captures. It will be passed two arguments, the editor instance and the key event. This key event is pretty much the raw key event, except that a stop() method is always added to it. You could feed it to, for example, jQuery.Event to further normalize it. This function can inspect the key event, and handle it if it wants to. 
It may return true to tell CodeMirror to ignore the event. Be wary that, on some browsers, stopping a keydown does not stop the keypress from firing, whereas on others it does. If you respond to an event, you should probably inspect its type property and only do something when it is keydown (or keypress for actions that need character data). */ onKeyEvent?: (instance: CodeMirror.Editor, event: Event) => boolean; /** Half - period in milliseconds used for cursor blinking. The default blink rate is 530ms. */ cursorBlinkRate?: number; /** Determines the height of the cursor. Default is 1 , meaning it spans the whole height of the line. For some fonts (and by some tastes) a smaller height (for example 0.85), which causes the cursor to not reach all the way to the bottom of the line, looks better */ cursorHeight?: number; /** Highlighting is done by a pseudo background - thread that will work for workTime milliseconds, and then use timeout to sleep for workDelay milliseconds. The defaults are 200 and 300, you can change these options to make the highlighting more or less aggressive. */ workTime?: number; /** See workTime. */ workDelay?: number; /** Indicates how quickly CodeMirror should poll its input textarea for changes(when focused). Most input is captured by events, but some things, like IME input on some browsers, don't generate events that allow CodeMirror to properly detect it. Thus, it polls. Default is 100 milliseconds. */ pollInterval?: number /** By default, CodeMirror will combine adjacent tokens into a single span if they have the same class. This will result in a simpler DOM tree, and thus perform better. With some kinds of styling(such as rounded corners), this will change the way the document looks. You can set this option to false to disable this behavior. 
*/ flattenSpans?: boolean; /** When highlighting long lines, in order to stay responsive, the editor will give up and simply style the rest of the line as plain text when it reaches a certain position. The default is 10000. You can set this to Infinity to turn off this behavior. */ maxHighlightLength?: number; /** Specifies the amount of lines that are rendered above and below the part of the document that's currently scrolled into view. This affects the amount of updates needed when scrolling, and the amount of work that such an update does. You should usually leave it at its default, 10. Can be set to Infinity to make sure the whole document is always rendered, and thus the browser's text search works on it. This will have bad effects on performance of big documents. */ viewportMargin?: number; /** Optional lint configuration to be used in conjunction with CodeMirror's linter addon. */ lint?: LintOptions; } interface TextMarkerOptions { /** Assigns a CSS class to the marked stretch of text. */ className?: string; /** Determines whether text inserted on the left of the marker will end up inside or outside of it. */ inclusiveLeft?: boolean; /** Like inclusiveLeft , but for the right side. */ inclusiveRight?: boolean; /** Atomic ranges act as a single unit when cursor movement is concerned � i.e. it is impossible to place the cursor inside of them. In atomic ranges, inclusiveLeft and inclusiveRight have a different meaning � they will prevent the cursor from being placed respectively directly before and directly after the range. */ atomic?: boolean; /** Collapsed ranges do not show up in the display.Setting a range to be collapsed will automatically make it atomic. */ collapsed?: boolean; /** When enabled, will cause the mark to clear itself whenever the cursor enters its range. This is mostly useful for text - replacement widgets that need to 'snap open' when the user tries to edit them. 
The "clear" event fired on the range handle can be used to be notified when this happens. */ clearOnEnter?: boolean; /** Use a given node to display this range.Implies both collapsed and atomic. The given DOM node must be an inline element(as opposed to a block element). */ replacedWith?: HTMLElement; /** A read - only span can, as long as it is not cleared, not be modified except by calling setValue to reset the whole document. Note: adding a read - only span currently clears the undo history of the editor, because existing undo events being partially nullified by read - only spans would corrupt the history (in the current implementation). */ readOnly?: boolean; /** When set to true (default is false), adding this marker will create an event in the undo history that can be individually undone(clearing the marker). */ addToHistory?: boolean; /** Can be used to specify an extra CSS class to be applied to the leftmost span that is part of the marker. */ startStyle?: string; /** Equivalent to startStyle, but for the rightmost span. */ endStyle?: string; /** When the target document is linked to other documents, you can set shared to true to make the marker appear in all documents. By default, a marker appears only in its target document. */ shared?: boolean; } interface StringStream { lastColumnPos: number; lastColumnValue: number; lineStart: number; /** * Current position in the string. */ pos: number; /** * Where the stream's position was when it was first passed to the token function. */ start: number; /** * The current line's content. */ string: string; /** * Number of spaces per tab character. */ tabSize: number; /** * Returns true only if the stream is at the end of the line. */ eol(): boolean; /** * Returns true only if the stream is at the start of the line. */ sol(): boolean; /** * Returns the next character in the stream without advancing it. Will return an null at the end of the line. 
*/ peek(): string; /** * Returns the next character in the stream and advances it. Also returns null when no more characters are available. */ next(): string; /** * match can be a character, a regular expression, or a function that takes a character and returns a boolean. * If the next character in the stream 'matches' the given argument, it is consumed and returned. * Otherwise, undefined is returned. */ eat(match: string): string; eat(match: RegExp): string; eat(match: (char: string) => boolean): string; /** * Repeatedly calls eat with the given argument, until it fails. Returns true if any characters were eaten. */ eatWhile(match: string): boolean; eatWhile(match: RegExp): boolean; eatWhile(match: (char: string) => boolean): boolean; /** * Shortcut for eatWhile when matching white-space. */ eatSpace(): boolean; /** * Moves the position to the end of the line. */ skipToEnd(): void; /** * Skips to the next occurrence of the given character, if found on the current line (doesn't advance the stream if * the character does not occur on the line). * * Returns true if the character was found. */ skipTo(ch: string): boolean; /** * Act like a multi-character eat - if consume is true or not given - or a look-ahead that doesn't update the stream * position - if it is false. pattern can be either a string or a regular expression starting with ^. When it is a * string, caseFold can be set to true to make the match case-insensitive. When successfully matching a regular * expression, the returned value will be the array returned by match, in case you need to extract matched groups. */ match(pattern: string, consume?: boolean, caseFold?: boolean): boolean; match(pattern: RegExp, consume?: boolean): string[]; /** * Backs up the stream n characters. Backing it up further than the start of the current token will cause things to * break, so be careful. */ backUp(n: number): void; /** * Returns the column (taking into account tabs) at which the current token starts. 
*/ column(): number; /** * Tells you how far the current line has been indented, in spaces. Corrects for tab characters. */ indentation(): number; /** * Get the string between the start of the current token and the current stream position. */ current(): string; } /** * A Mode is, in the simplest case, a lexer (tokenizer) for your language — a function that takes a character stream as input, * advances it past a token, and returns a style for that token. More advanced modes can also handle indentation for the language. */ interface Mode<T> { /** * This function should read one token from the stream it is given as an argument, optionally update its state, * and return a style string, or null for tokens that do not have to be styled. Multiple styles can be returned, separated by spaces. */ token(stream: StringStream, state: T): string; /** * A function that produces a state object to be used at the start of a document. */ startState?: () => T; /** * For languages that have significant blank lines, you can define a blankLine(state) method on your mode that will get called * whenever a blank line is passed over, so that it can update the parser state. */ blankLine?: (state: T) => void; /** * Given a state returns a safe copy of that state. */ copyState?: (state: T) => T; /** * The indentation method should inspect the given state object, and optionally the textAfter string, which contains the text on * the line that is being indented, and return an integer, the amount of spaces to indent. */ indent?: (state: T, textAfter: string) => number; /** The four below strings are used for working with the commenting addon. */ /** * String that starts a line comment. */ lineComment?: string; /** * String that starts a block comment. */ blockCommentStart?: string; /** * String that ends a block comment. */ blockCommentEnd?: string; /** * String to put at the start of continued lines in a block comment. 
*/ blockCommentLead?: string; /** * Trigger a reindent whenever one of the characters in the string is typed. */ electricChars?: string /** * Trigger a reindent whenever the regex matches the part of the line before the cursor. */ electricinput?: RegExp } /** * A function that, given a CodeMirror configuration object and an optional mode configuration object, returns a mode object. */ interface ModeFactory<T> { (config: CodeMirror.EditorConfiguration, modeOptions?: any): Mode<T> } /** * id will be the id for the defined mode. Typically, you should use this second argument to defineMode as your module scope function * (modes should not leak anything into the global scope!), i.e. write your whole mode inside this function. */ function defineMode(id: string, modefactory: ModeFactory<any>): void; /** * The first argument is a configuration object as passed to the mode constructor function, and the second argument * is a mode specification as in the EditorConfiguration mode option. */ function getMode<T>(config: CodeMirror.EditorConfiguration, mode: any): Mode<T>; /** * Utility function from the overlay.js addon that allows modes to be combined. The mode given as the base argument takes care of * most of the normal mode functionality, but a second (typically simple) mode is used, which can override the style of text. * Both modes get to parse all of the text, but when both assign a non-null style to a piece of code, the overlay wins, unless * the combine argument was true and not overridden, or state.overlay.combineTokens was true, in which case the styles are combined. */ function overlayMode<T, S>(base: Mode<T>, overlay: Mode<S>, combine?: boolean): Mode<any> /** * async specifies that the lint process runs asynchronously. hasGutters specifies that lint errors should be displayed in the CodeMirror * gutter, note that you must use this in conjunction with [ "CodeMirror-lint-markers" ] as an element in the gutters argument on * initialization of the CodeMirror instance. 
*/ interface LintStateOptions { async: boolean; hasGutters: boolean; } /** * Adds the getAnnotations callback to LintStateOptions which may be overridden by the user if they choose use their own * linter. */ interface LintOptions extends LintStateOptions { getAnnotations: AnnotationsCallback; } /** * A function that calls the updateLintingCallback with any errors found during the linting process. */ interface AnnotationsCallback { (content: string, updateLintingCallback: UpdateLintingCallback, options: LintStateOptions, codeMirror: Editor): void; } /** * A function that, given an array of annotations, updates the CodeMirror linting GUI with those annotations */ interface UpdateLintingCallback { (codeMirror: Editor, annotations: Annotation[]): void; } /** * An annotation contains a description of a lint error, detailing the location of the error within the code, the severity of the error, * and an explaination as to why the error was thrown. */ interface Annotation { from: Position; message?: string; severity?: string; to?: Position; } }
mit
bunnyblue/tchannel
node/endpoint-handler.js
3375
// Copyright (c) 2015 Uber Technologies, Inc. // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
'use strict';

var EventEmitter = require('./lib/event_emitter');
var inherits = require('util').inherits;
var util = require('util');

var errors = require('./errors');
var coerceRequestHandler = require('./request-handler');

/**
 * Routes incoming TChannel requests to handlers registered per endpoint name.
 *
 * @param {string} serviceName - name of the service this handler serves.
 */
function TChannelEndpointHandler(serviceName) {
    // Support calling without `new`.
    if (!(this instanceof TChannelEndpointHandler)) {
        return new TChannelEndpointHandler(serviceName);
    }
    EventEmitter.call(this);
    // Fired on every dispatch attempt with {name, handler};
    // handler is undefined when the endpoint is unknown.
    this.handleEndpointEvent = this.defineEvent('handleEndpoint');
    this.serviceName = serviceName;
    // Prototype-less map so endpoint names can never collide with
    // Object.prototype keys.
    this.endpoints = Object.create(null);
}
inherits(TChannelEndpointHandler, EventEmitter);

TChannelEndpointHandler.prototype.type = 'tchannel.endpoint-handler';

/**
 * Registers a handler function under an endpoint name.
 *
 * Accepts either (name, handler) or (name, options, handler). Throws
 * errors.InvalidHandlerError when handler is not a function. Returns the
 * handler that was passed in (not the coerced wrapper stored internally).
 */
TChannelEndpointHandler.prototype.register = function register(name, options, handler) {
    // Two-argument form: register(name, handler).
    if (typeof options === 'function') {
        handler = options;
        options = {};
    }
    if (typeof handler !== 'function') {
        throw errors.InvalidHandlerError();
    }
    if (options.streamed) {
        handler.canStream = true;
    }
    this.endpoints[name] = coerceRequestHandler(handler, this, options);
    return handler;
};

/**
 * Dispatches a request to the handler registered for req.endpoint, or
 * replies with a BadRequest error when no such endpoint exists. Always
 * emits handleEndpointEvent first, even for unknown endpoints.
 */
TChannelEndpointHandler.prototype.handleRequest = function handleRequest(req, buildResponse) {
    var endpointHandler = this.endpoints[req.endpoint];
    this.handleEndpointEvent.emit(this, {
        name: req.endpoint,
        handler: endpointHandler
    });
    if (endpointHandler) {
        endpointHandler.handleRequest(req, buildResponse);
        return;
    }
    buildResponse({streamed: false}).sendError('BadRequest', util.format(
        'no such endpoint service=%j endpoint=%j',
        req.serviceName, req.endpoint));
};

/**
 * Buffers arg2/arg3 from the request and then invokes the handler with
 * (req, res, arg2, arg3). On accumulation failure it sends an
 * UnexpectedError on the response instead.
 */
TChannelEndpointHandler.prototype.withArg23 = function withArg23(req, buildResponse, handler) {
    req.withArg23(function gotArg23(err, arg2, arg3) {
        // Build the response up front so an accumulation error can still be
        // reported to the caller.
        var res = buildResponse({streamed: false});
        if (err) {
            // TODO: log error
            res.sendError('UnexpectedError', util.format(
                'error accumulating arg2/arg3: %s: %s',
                err.constructor.name, err.message));
            return;
        }
        handler.handleRequest(req, res, arg2, arg3);
    });
};

module.exports = TChannelEndpointHandler;
mit
rpoli/clayClean
node_modules/grunt-react/main.js
138
/**
 * Module Dependencies / Exports
 *
 * Re-exports the transformer collection from ./lib/transformers.js
 * unchanged; this file exists only as the package entry point.
 */
module.exports = require('./lib/transformers.js');
mit
huanpc/IoT-1
gui/controller/.venv/lib/python3.5/site-packages/jet/static/jet/js/src/features/sidebar/bookmarks.js
4219
var $ = require('jquery');
var t = require('../../utils/translate');

require('jquery-ui/ui/core');
require('jquery-ui/ui/widget');
require('jquery-ui/ui/mouse');
require('jquery-ui/ui/draggable');
require('jquery-ui/ui/resizable');
require('jquery-ui/ui/button');
require('jquery-ui/ui/dialog');

/**
 * Sidebar bookmarks feature: lets the user add and remove bookmark links in
 * the admin sidebar through jQuery UI confirmation dialogs backed by AJAX
 * form submissions.
 *
 * @param {jQuery} $sidebar - root sidebar element containing the bookmark
 *     forms, dialogs and list.
 */
var SideBarBookmarks = function($sidebar) {
    this.$sidebar = $sidebar;
};

SideBarBookmarks.prototype = {
    /**
     * Submits the add-bookmark form; on success clones the hidden
     * `.bookmark-item.clone` template, fills it in from the server response
     * and appends it to the bookmark list. Responses with `result.error`
     * set are silently ignored.
     */
    addBookmark: function($form, $container) {
        $.ajax({
            url: $form.attr('action'),
            method: $form.attr('method'),
            dataType: 'json',
            data: $form.serialize(),
            success: function (result) {
                if (result.error) {
                    return;
                }

                var $item = $container
                    .find('.bookmark-item.clone')
                    .clone()
                    .removeClass('clone');

                $item
                    .attr('href', result.url)
                    .find('.sidebar-link-label')
                    // NOTE(review): result.title is appended as HTML, not text;
                    // confirm the server escapes it, otherwise this is injectable.
                    .append(result.title);

                $item
                    .find('.bookmarks-remove')
                    .data('bookmark-id', result.id);

                $container.append($item);
            }
        });
    },

    /**
     * Submits the remove-bookmark form; on success removes the bookmark's
     * list item from the DOM. Responses with `result.error` set are ignored.
     */
    deleteBookmark: function($form, $item) {
        $.ajax({
            url: $form.attr('action'),
            method: $form.attr('method'),
            dataType: 'json',
            data: $form.serialize(),
            success: function (result) {
                if (result.error) {
                    return;
                }

                $item.remove();
            }
        });
    },

    /**
     * Wires the `.bookmarks-add` links to a confirmation dialog pre-filled
     * with the current page title (or the link's data-title) and URL.
     */
    initBookmarksAdding: function($sidebar) {
        var self = this;
        var $form = $sidebar.find('#bookmarks-add-form');
        var $titleInput = $form.find('input[name="title"]');
        var $urlInput = $form.find('input[name="url"]');
        var $dialog = $sidebar.find('#bookmarks-add-dialog');
        var $container = $sidebar.find('.bookmarks-list');

        $sidebar.find('.bookmarks-add').on('click', function(e) {
            e.preventDefault();

            var $link = $(this);
            // Prefer an explicit data-title on the link; fall back to the
            // document title.
            var defaultTitle = $link.data('title') ? $link.data('title') : document.title;
            var url = window.location.href;

            $titleInput.val(defaultTitle);
            $urlInput.val(url);

            var buttons = {};

            buttons[t('Add')] = function() {
                self.addBookmark($form, $container);
                $(this).dialog('close');
            };

            buttons[t('Cancel')] = function() {
                $(this).dialog('close');
            };

            $dialog.dialog({
                resizable: false,
                modal: true,
                buttons: buttons
            });
        });
    },

    /**
     * Wires `.bookmarks-remove` controls (delegated on the sidebar, so items
     * added later are covered too) to a delete-confirmation dialog.
     */
    initBookmarksRemoving: function($sidebar) {
        var self = this;
        var $form = $sidebar.find('#bookmarks-remove-form');
        var $idInput = $form.find('input[name="id"]');
        var $dialog = $sidebar.find('#bookmarks-remove-dialog');

        $sidebar.on('click', '.bookmarks-remove', function(e) {
            e.preventDefault();

            var $remove = $(this);
            var $item = $remove.closest('.bookmark-item');
            var bookmarkId = $remove.data('bookmark-id');

            $idInput.val(bookmarkId);

            var buttons = {};

            buttons[t('Delete')] = function() {
                self.deleteBookmark($form, $item);
                $(this).dialog('close');
            };

            buttons[t('Cancel')] = function() {
                $(this).dialog('close');
            };

            $dialog.dialog({
                resizable: false,
                modal: true,
                buttons: buttons
            });
        });
    },

    /**
     * Initializes both the adding and removing behaviour for a sidebar.
     */
    initBookmarks: function($sidebar) {
        this.initBookmarksAdding($sidebar);
        this.initBookmarksRemoving($sidebar);
    },

    run: function() {
        try {
            // Delegate to initBookmarks() instead of repeating its two calls
            // inline (the helper existed but was previously unused).
            this.initBookmarks(this.$sidebar);
        } catch (e) {
            console.error(e, e.stack);
        }
    }
};

module.exports = SideBarBookmarks;
mit
malioret/ConnectedSoccerApi
vendor/sonata-project/core-bundle/Date/MomentFormatConverter.php
1806
<?php

/*
 * This file is part of the Sonata package.
 *
 * (c) Thomas Rabaix <[email protected]>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Sonata\CoreBundle\Date;

/**
 * Handles Moment.js <-> PHP date format conversion.
 *
 * Inspired by https://github.com/fightbulc/moment.php/blob/master/src/Moment/CustomFormats/MomentJs.php
 *
 * @author Hugo Briand <[email protected]>
 * @author Andrej Hudec <[email protected]>
 */
class MomentFormatConverter
{
    /**
     * Maps PHP ICU date-format tokens (keys) to their Moment.js equivalents
     * (values).
     *
     * For ICU formats see http://userguide.icu-project.org/formatparse/datetime#TOC-Date-Time-Format-Syntax
     * For Moment formats see http://momentjs.com/docs/#/displaying/format/
     *
     * Tokens that are spelled identically in both syntaxes (month MMMM/MMM/MM,
     * hour HH/H/hh/h, am/pm a, minute mm/m, second ss/s) need no entry.
     *
     * @var array
     */
    private static $icuToMoment = array(
        // year
        'yyyy' => 'YYYY',
        'yy' => 'YY',
        'y' => 'YYYY',
        // day of month
        'dd' => 'DD',
        'd' => 'D',
        // day of week
        'EE' => 'ddd',
        'EEEEEE' => 'dd',
        // timezone
        'ZZZZZ' => 'Z',
        'ZZZ' => 'ZZ',
        // literal letter 'T'
        '\'T\'' => 'T',
    );

    /**
     * Returns associated moment.js format.
     *
     * @param $format PHP Date format
     *
     * @return string Moment.js date format
     */
    public function convert($format)
    {
        // strtr() always tries the longest key first, so 'yyyy' wins over
        // 'yy'/'y' regardless of the order of the map above, and replaced
        // text is never re-scanned.
        return strtr($format, self::$icuToMoment);
    }
}
mit
SimenB/webpack
test/cases/parsing/issue-3769/imported.js
23
// Test fixture for webpack issue-3769: a CommonJS named export consumed by
// the importing module of this test case. The exact `exports.<name> =`
// form is what the parser test exercises, so it must stay as-is.
exports.test = "test";
mit
BrennanConroy/corefx
src/System.Runtime.InteropServices/src/System/Runtime/InteropServices/ComAwareEventInfo.cs
4700
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Reflection;

// This type is obsolete, and is expected to be used in very specific ways or it may
// throw null reference exceptions.
#pragma warning disable CS8610

namespace System.Runtime.InteropServices
{
    /// <summary>
    /// An <see cref="EventInfo"/> wrapper that routes handler add/remove through
    /// the COM event sink helpers when the event target is a COM object, and
    /// falls back to plain reflection for managed targets. All metadata queries
    /// are delegated unchanged to the wrapped <see cref="EventInfo"/>.
    /// </summary>
    public class ComAwareEventInfo : EventInfo
    {
        // The reflection EventInfo this wrapper delegates to.
        private EventInfo _innerEventInfo;

        /// <summary>
        /// Wraps the event named <paramref name="eventName"/> on <paramref name="type"/>.
        /// NOTE(review): GetEvent may return null for an unknown event; the
        /// null-forgiving operator means failures surface later as NREs.
        /// </summary>
        public ComAwareEventInfo(Type type, string eventName)
        {
            _innerEventInfo = type.GetEvent(eventName)!;
        }

        /// <summary>
        /// Attaches <paramref name="handler"/>: via ComEventsHelper when the
        /// target is a COM RCW, via reflection otherwise.
        /// </summary>
        public override void AddEventHandler(object target, Delegate handler)
        {
            if (Marshal.IsComObject(target))
            {
                // retrieve sourceIid and dispid
                GetDataForComInvocation(_innerEventInfo, out Guid sourceIid, out int dispid);

                ComEventsHelper.Combine(target, sourceIid, dispid, handler);
            }
            else
            {
                // we are dealing with a managed object - just add the delegate through reflection
                _innerEventInfo.AddEventHandler(target, handler);
            }
        }

        /// <summary>
        /// Detaches <paramref name="handler"/>; mirror image of <see cref="AddEventHandler"/>.
        /// </summary>
        public override void RemoveEventHandler(object target, Delegate handler)
        {
            if (Marshal.IsComObject(target))
            {
                // retrieve sourceIid and dispid
                GetDataForComInvocation(_innerEventInfo, out Guid sourceIid, out int dispid);

                ComEventsHelper.Remove(target, sourceIid, dispid, handler);
            }
            else
            {
                // we are dealing with a managed object - just add the delegate through reflection
                _innerEventInfo.RemoveEventHandler(target, handler);
            }
        }

        // --- Pure delegation to the wrapped EventInfo from here down. ---

        public override EventAttributes Attributes => _innerEventInfo.Attributes;

        public override MethodInfo? GetAddMethod(bool nonPublic) => _innerEventInfo.GetAddMethod(nonPublic);

        public override MethodInfo[] GetOtherMethods(bool nonPublic) => _innerEventInfo.GetOtherMethods(nonPublic);

        public override MethodInfo? GetRaiseMethod(bool nonPublic) => _innerEventInfo.GetRaiseMethod(nonPublic);

        public override MethodInfo? GetRemoveMethod(bool nonPublic) => _innerEventInfo.GetRemoveMethod(nonPublic);

        public override Type? DeclaringType => _innerEventInfo.DeclaringType;

        public override object[] GetCustomAttributes(Type attributeType, bool inherit)
        {
            return _innerEventInfo.GetCustomAttributes(attributeType, inherit);
        }

        public override object[] GetCustomAttributes(bool inherit)
        {
            return _innerEventInfo.GetCustomAttributes(inherit);
        }

        public override IList<CustomAttributeData> GetCustomAttributesData() => _innerEventInfo.GetCustomAttributesData();

        public override bool IsDefined(Type attributeType, bool inherit)
        {
            return _innerEventInfo.IsDefined(attributeType, inherit);
        }

        public override int MetadataToken => _innerEventInfo.MetadataToken;

        public override Module Module => _innerEventInfo.Module;

        public override string Name => _innerEventInfo.Name;

        public override Type? ReflectedType => _innerEventInfo.ReflectedType;

        /// <summary>
        /// Extracts the source interface IID and the event method's DISPID from
        /// the declaring type's ComEventInterfaceAttribute, for use with
        /// ComEventsHelper. Throws if the attribute is missing, ambiguous, or
        /// the source method lacks a DispIdAttribute.
        /// </summary>
        private static void GetDataForComInvocation(EventInfo eventInfo, out Guid sourceIid, out int dispid)
        {
            object[] comEventInterfaces = eventInfo.DeclaringType!.GetCustomAttributes(typeof(ComEventInterfaceAttribute), inherit: false);

            if (comEventInterfaces == null || comEventInterfaces.Length == 0)
            {
                throw new InvalidOperationException(SR.InvalidOperation_NoComEventInterfaceAttribute);
            }

            if (comEventInterfaces.Length > 1)
            {
                throw new AmbiguousMatchException(SR.AmbiguousMatch_MultipleEventInterfaceAttributes);
            }

            Type sourceInterface = ((ComEventInterfaceAttribute)comEventInterfaces[0]).SourceInterface;

            Guid guid = sourceInterface.GUID;

            // Look up the method with the same name as the event on the source interface.
            MethodInfo methodInfo = sourceInterface.GetMethod(eventInfo.Name)!;
            Attribute? dispIdAttribute = Attribute.GetCustomAttribute(methodInfo, typeof(DispIdAttribute));
            if (dispIdAttribute == null)
            {
                throw new InvalidOperationException(SR.InvalidOperation_NoDispIdAttribute);
            }

            sourceIid = guid;
            dispid = ((DispIdAttribute)dispIdAttribute).Value;
        }
    }
}
mit
thomas-moison/concrete5
concrete/core/controllers/single_pages/dashboard/system/backup_restore/backup.php
3819
<?php
defined('C5_EXECUTE') or die("Access Denied.");

Loader::library('backup');

/**
 * Dashboard controller for creating, listing, downloading, deleting and
 * restoring site database backups. Every action re-checks the "canBackup"
 * task permission before touching the filesystem or the database.
 */
class Concrete5_Controller_Dashboard_System_BackupRestore_Backup extends DashboardBaseController {

	public function on_start() {
		$this->addHeaderItem(Loader::helper('html')->javascript('jquery.cookie.js'));
		parent::on_start();
	}

	/**
	 * Runs a database backup, optionally encrypted, then re-renders the list.
	 * Any exception raised by Backup::execute() is surfaced via the 'error'
	 * view variable instead of aborting the request.
	 */
	public function run_backup() {
		$encrypt = $this->post('useEncryption');
		$tp = new TaskPermission();
		if ($tp->canBackup()) {
			$encrypt = (bool) $encrypt;
			try {
				$backup = Backup::execute($encrypt);
			} catch (Exception $e) {
				$this->set('error', $e);
			}
			$this->view();
		}
	}

	/**
	 * Lists the backup files found in DIR_FILES_BACKUPS, pairing each file
	 * with a date derived from the first run of digits in its name.
	 */
	public function view() {
		$tp = new TaskPermission();
		if ($tp->canBackup()) {
			$fh = Loader::helper('file');
			$arr_bckups = @$fh->getDirectoryContents(DIR_FILES_BACKUPS);
			$arr_backupfileinfo = array();
			if (count($arr_bckups) > 0) {
				foreach ($arr_bckups as $bkupfile) {
					// Assumes every backup file name embeds a unix timestamp;
					// a name with no digits would leave $timestamp[0] unset —
					// TODO confirm the backup naming scheme guarantees this.
					preg_match('/[0-9]+/', $bkupfile, $timestamp);
					$arr_backupfileinfo[] = array("file" => $bkupfile, "date" => date("Y-m-d H:i:s", $timestamp[0]));
				}
				$this->set('backups', $arr_backupfileinfo);
			}
		}
	}

	/**
	 * Streams a backup file to the browser and exits.
	 *
	 * @param string $file backup file name (untrusted route parameter)
	 * @return bool|void false when the user lacks the backup permission
	 */
	public function download($file) {
		$tp = new TaskPermission();
		if (!$tp->canBackup()) {
			return false;
		}
		// Security fix: strip any path components so a crafted value such as
		// "../../config/site.php" cannot escape the backups directory
		// (restore_backup() already normalizes its input the same way).
		$file = basename($file);
		if (file_exists(DIR_FILES_BACKUPS . '/' . $file)) {
			// Backups are stored unreadable (mode 0000); open the file up so
			// the web server can stream it.
			chmod(DIR_FILES_BACKUPS . '/' . $file, 0666);
			if (file_exists(DIR_FILES_BACKUPS . '/' . $file)) {
				$f = Loader::helper('file');
				$f->forceDownload(DIR_FILES_BACKUPS . '/' . $file);
				exit;
			}
			// Unreachable after a successful download (exit above); restores
			// the lock-down mode if forceDownload was never reached.
			chmod(DIR_FILES_BACKUPS . '/' . $file, 0000);
		} else {
			$this->set('error', array(t('Unable to locate file %s', DIR_FILES_BACKUPS . '/' . $file)));
			$this->view();
		}
	}

	/**
	 * Deletes the posted backup file, then re-renders the list.
	 * Only [0-9A-Za-z._] characters are honored and ".." is rejected, so the
	 * file cannot resolve outside DIR_FILES_BACKUPS.
	 *
	 * @return bool|void false when the user lacks the backup permission
	 */
	public function delete_backup() {
		$tp = new TaskPermission();
		if (!$tp->canBackup()) {
			return false;
		}
		$str_fname = $this->post('backup_file');
		// For security reasons allow only known characters in the string
		// (no / or \) so path traversal is not possible.
		$int_mResult = preg_match('/[0-9A-Za-z._]+/', $str_fname, $ar_matches);
		// Robustness fix: only read $ar_matches[0] when the pattern matched;
		// the original code raised an undefined-index notice otherwise.
		$str_fname = $int_mResult ? $ar_matches[0] : null;
		if (!is_null($str_fname) && trim($str_fname) != "" && !preg_match('/\.\./', $str_fname) && file_exists(DIR_FILES_BACKUPS . "/$str_fname")) {
			// Bug fix: the mode was written as decimal 666 (octal 1232 —
			// wrong permissions); chmod() modes must be octal, so use 0666.
			chmod(DIR_FILES_BACKUPS . "/$str_fname", 0666);
			unlink(DIR_FILES_BACKUPS . "/$str_fname");
		}
		$this->view();
	}

	/**
	 * Restores the database from the posted backup file. The dump is split on
	 * blank lines and executed statement by statement; content that contains
	 * neither INSERT nor CREATE is assumed encrypted and decrypted first.
	 *
	 * @return bool|void false on permission failure or empty dump
	 * @throws Exception when the named backup file does not exist
	 */
	public function restore_backup() {
		set_time_limit(0);
		$tp = new TaskPermission();
		if (!$tp->canBackup()) {
			return false;
		}
		// basename(realpath(...)) collapses the posted name to a plain file
		// inside the backups directory.
		$file = basename(realpath(DIR_FILES_BACKUPS . '/' . $this->post('backup_file')));
		$fh = Loader::helper('file');
		$db = Loader::db();
		if (!file_exists(DIR_FILES_BACKUPS . '/' . $file)) {
			throw new Exception(t('Invalid backup file specified.'));
		}
		chmod(DIR_FILES_BACKUPS . '/' . $file, 0666);
		$str_restSql = $fh->getContents(DIR_FILES_BACKUPS . '/' . $file);
		if (!$str_restSql) {
			$this->set("error", array("There was an error trying to restore the database. This file was empty."));
			$this->view();
			return false;
		}
		$crypt = Loader::helper('encryption');
		// Heuristic: a readable dump contains INSERT or CREATE; anything else
		// is treated as an encrypted backup.
		if (!preg_match('/INSERT/m', $str_restSql) && !preg_match('/CREATE/m', $str_restSql)) {
			$str_restSql = $crypt->decrypt($str_restSql);
		}
		$arr_sqlStmts = explode("\n\n", $str_restSql);
		foreach ($arr_sqlStmts as $str_stmt) {
			if (trim($str_stmt) != "") {
				$res_restoration = $db->execute($str_stmt);
				if (!$res_restoration) {
					// NOTE: file permissions are left open on this early
					// return, matching the original behavior.
					$this->set("error", array("There was an error trying to restore the database. In query $str_stmt"));
					return;
				}
			}
		}
		// Message fix: "Sucessful" -> "Successful".
		$this->set("message", "Restoration Successful");
		// Reset permissions for security!
		chmod(DIR_FILES_BACKUPS . '/' . $file, 0000);
		Cache::flush();
		$this->view();
	}
}
mit
andrewmcvearry/mille-bean
lwjgl/src/templates/org/lwjgl/opengles/EXT_shadow_samplers.java
2176
/*
 * Copyright (c) 2002-2008 LWJGL Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'LWJGL' nor the names of
 *   its contributors may be used to endorse or promote products derived
 *   from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.lwjgl.opengles;

/**
 * Constant definitions for the OpenGL ES EXT_shadow_samplers extension
 * (texture compare mode/func for shadow sampling). Template interface used
 * by the LWJGL generator; values mirror the extension specification tokens.
 */
public interface EXT_shadow_samplers {

	/**
	 * Accepted by the &lt;pname&gt; parameter of TexParameterf, TexParameteri,
	 * TexParameterfv, TexParameteriv, GetTexParameterfv, and GetTexParameteriv:
	 */
	int GL_TEXTURE_COMPARE_MODE_EXT = 0x884C,
		GL_TEXTURE_COMPARE_FUNC_EXT = 0x884D;

	/**
	 * Accepted by the &lt;param&gt; parameter of TexParameterf, TexParameteri,
	 * TexParameterfv, and TexParameteriv when the &lt;pname&gt; parameter is
	 * TEXTURE_COMPARE_MODE_EXT:
	 */
	int GL_COMPARE_REF_TO_TEXTURE_EXT = 0x884E;
}
mit
12spokes/fog
lib/fog/hp/requests/network/delete_floating_ip.rb
869
module Fog
  module HP
    class Network
      class Real
        # Delete an existing floating IP via the HP Network API.
        #
        # ==== Parameters
        # * 'floating_ip_id'<~String>: - UUID of the floating IP address to delete
        def delete_floating_ip(floating_ip_id)
          request(
            :expects => 204,
            :method  => 'DELETE',
            :path    => "floatingips/#{floating_ip_id}"
          )
        end
      end

      class Mock
        # In-memory stand-in for the real call: removes the floating IP from
        # the fake data store, or raises NotFound for an unknown id.
        def delete_floating_ip(floating_ip_id)
          known = list_floating_ips.body['floatingips'].any? { |ip| ip['id'] == floating_ip_id }
          raise Fog::HP::Network::NotFound unless known

          self.data[:floating_ips].delete(floating_ip_id)
          resp = Excon::Response.new
          resp.status = 204
          resp
        end
      end
    end
  end
end
mit
sr-education/split
spec/algorithms/weighted_sample_spec.rb
853
require "spec_helper"

# Specs for Split::Algorithms::WeightedSample, which picks an experiment
# alternative with probability proportional to its configured weight.
describe Split::Algorithms::WeightedSample do
  it "should return an alternative" do
    # Regardless of the weights, the return value is a Split::Alternative.
    experiment = Split::ExperimentCatalog.find_or_create('link_color', {'blue' => 100}, {'red' => 0 })
    expect(Split::Algorithms::WeightedSample.choose_alternative(experiment).class).to eq(Split::Alternative)
  end

  it "should always return a heavily weighted option" do
    # All weight on 'blue' makes the sample deterministic.
    experiment = Split::ExperimentCatalog.find_or_create('link_color', {'blue' => 100}, {'red' => 0 })
    expect(Split::Algorithms::WeightedSample.choose_alternative(experiment).name).to eq('blue')
  end

  it "should return one of the results" do
    # With equal weights either alternative is a valid outcome.
    experiment = Split::ExperimentCatalog.find_or_create('link_color', {'blue' => 1}, {'red' => 1 })
    expect(['red', 'blue']).to include Split::Algorithms::WeightedSample.choose_alternative(experiment).name
  end
end
mit
pscrevs/gcconnex
mod/tidypics/views/default/forms/photos/admin/delete_image.php
554
<?php
/**
 * Admin form for deleting a Tidypics image by GUID (used when an image entry
 * is not rendered properly on the site and its delete button is unreachable).
 *
 * [email protected]
 */

$module_title = elgg_echo('tidypics:delete_image');

$body = sprintf('<p>%s</p>', elgg_echo('tidypics:delete_image_blurb'));
$body .= sprintf('<label>%s</label>', elgg_echo('tidypics:delete_image_id'));
$body .= elgg_view('input/text', array('name' => 'guid'));

echo elgg_view_module('inline', $module_title, $body);
echo elgg_view('input/submit', array('value' => elgg_echo('delete')));
gpl-2.0
JKT-OSSCLUB/ProActio
Proactio Front/php/pear/phpwebdriver/WebDriverException.php
210
<?php

/**
 * WebDriverException
 *
 * Exception raised by WebDriver operations. Supports standard PHP exception
 * chaining via the optional $previous argument.
 */
class WebDriverException extends Exception
{
    /**
     * @param string         $message  Human-readable error description.
     * @param int            $code     WebDriver status/error code.
     * @param Exception|null $previous Underlying cause, if any.
     */
    public function __construct($message, $code, $previous = null)
    {
        // Bug fix: $previous was accepted but never forwarded, so
        // getPrevious() always returned null and the exception chain was lost.
        parent::__construct($message, $code, $previous);
    }
}
gpl-2.0
girish-ankit/drupal8
modules/examples/stream_wrapper_example/src/StreamWrapper/MockSessionTrait.php
2265
<?php

namespace Drupal\stream_wrapper_example\StreamWrapper;

use Symfony\Component\HttpFoundation\RequestStack;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Session\SessionInterface;
use Prophecy\Argument;

/**
 * Gives PHPUnit tests a session double whose storage is a plain array.
 */
trait MockSessionTrait {

  /**
   * Plain array that backs the mocked session.
   *
   * @var array
   */
  protected $sessionStore;

  /**
   * Test double standing in for the HTTP request stack.
   *
   * @var \Symfony\Component\HttpFoundation\RequestStack|\Prophecy\Prophecy\ProphecyInterface
   */
  protected $requestStack;

  /**
   * Builds the request-stack double whose current request yields the session mock.
   *
   * @return ProphecyInterface
   *   A test double, or mock, of a RequestStack object
   *   that can be used to return a mock Session object.
   */
  protected function createSessionMock() {
    $this->sessionStore = [];
    $owner = $this;

    // Wire get/set/remove of the 'stream_wrapper_example' key straight
    // through to the backing array on this trait.
    $session = $this->prophesize(SessionInterface::class);
    $session
      ->get('stream_wrapper_example', [])
      ->will(function ($arguments) use ($owner) {
        return $owner->getSessionStore();
      });
    $session
      ->set('stream_wrapper_example', Argument::any())
      ->will(function ($arguments) use ($owner) {
        $owner->setSessionStore($arguments[1]);
      });
    $session
      ->remove('stream_wrapper_example')
      ->will(function ($arguments) use ($owner) {
        $owner->resetSessionStore();
      });

    $request = $this->prophesize(Request::class);
    $request
      ->getSession()
      ->willReturn($session->reveal());

    $request_stack = $this->prophesize(RequestStack::class);
    $request_stack
      ->getCurrentRequest()
      ->willReturn($request->reveal());

    return $this->requestStack = $request_stack->reveal();
  }

  /**
   * Returns a SessionWrapper bound to the mocked request stack.
   */
  public function getSessionWrapper() {
    return new SessionWrapper($this->requestStack);
  }

  /**
   * Reads the backing array (used by the session mock).
   */
  public function getSessionStore() {
    return $this->sessionStore;
  }

  /**
   * Replaces the backing array (used by the session mock).
   */
  public function setSessionStore($data) {
    $this->sessionStore = $data;
  }

  /**
   * Empties the backing array (used by the session mock).
   */
  public function resetSessionStore() {
    $this->sessionStore = [];
  }

}
gpl-2.0
md-5/jdk10
test/hotspot/jtreg/vmTestbase/nsk/jdi/FloatValue/compareTo/compareto001.java
12291
/*
 * Copyright (c) 2002, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package nsk.jdi.FloatValue.compareTo;

import nsk.share.*;
import nsk.share.jpda.*;
import nsk.share.jdi.*;

import com.sun.jdi.*;
import java.io.*;
import java.util.*;

/**
 * JDI test for FloatValue.compareTo(Object). The debugger attaches to a
 * debuggee, mirrors its FloatValue fields, and checks the standard
 * Comparable contract (reflexivity, consistency with equals, symmetry,
 * anticommutativity, transitivity) plus the expected exceptions for null
 * and non-FloatValue arguments.
 */
public class compareto001 {

    //------------------------------------------------------- immutable common fields

    // Handshake strings exchanged with the debuggee over the IOPipe.
    final static String SIGNAL_READY = "ready";
    final static String SIGNAL_GO    = "go";
    final static String SIGNAL_QUIT  = "quit";

    private static int waitTime;
    private static int exitStatus;
    private static ArgumentHandler     argHandler;
    private static Log                 log;
    private static IOPipe              pipe;
    private static Debugee             debuggee;
    private static ReferenceType       debuggeeClass;

    //------------------------------------------------------- mutable common fields

    private final static String prefix = "nsk.jdi.FloatValue.compareTo";
    private final static String className = ".compareto001";
    private final static String debuggerName = prefix + className;
    private final static String debuggeeName = debuggerName + "a";

    //------------------------------------------------------- test specific fields

    // Names of the debuggee fields holding the object under test and the
    // array of field names used as comparison arguments.
    private final static String objectToCheck = "testedObj";
    private final static String arrPrimitives = "testedFields";
    private static Value objectValue;
    private static List fieldList;

    //------------------------------------------------------- immutable common methods

    public static void main(String argv[]) {
        System.exit(Consts.JCK_STATUS_BASE + run(argv, System.out));
    }

    // Log an informational message with a debugger prefix.
    private static void display(String msg) {
        log.display("debugger > " + msg);
    }

    // Log a failure message with a debugger prefix.
    private static void complain(String msg) {
        log.complain("debugger FAILURE > " + msg);
    }

    /**
     * Launches the debuggee, runs the checks, and returns the test status
     * (Consts.TEST_PASSED / Consts.TEST_FAILED).
     */
    public static int run(String argv[], PrintStream out) {
        exitStatus = Consts.TEST_FAILED;
        argHandler = new ArgumentHandler(argv);
        log = new Log(out, argHandler);
        waitTime = argHandler.getWaitTime() * 60000;

        debuggee = Debugee.prepareDebugee(argHandler, log, debuggeeName);

        debuggeeClass = debuggee.classByName(debuggeeName);
        if ( debuggeeClass == null ) {
            complain("Class '" + debuggeeName + "' not found.");
            exitStatus = Consts.TEST_FAILED;
        }

        // NOTE(review): execTest() runs even when the class lookup above
        // failed; execTest() re-checks and returns early in that case.
        execTest();

        debuggee.quit();

        return exitStatus;
    }

    //------------------------------------------------------ mutable common method

    /**
     * Mirrors the tested object and the array of field names from the
     * debuggee, then compares every FloatValue field against each named
     * field via PerformComparing().
     */
    private static void execTest() {

        debuggeeClass = debuggee.classByName(debuggeeName);
        if ( debuggeeClass == null ) {
            complain("Class '" + debuggeeName + "' not found.");
            return;
        }

        // getting of object to check
        Field field = debuggeeClass.fieldByName(objectToCheck);
        if ( field == null ) {
            complain("Field '" + objectToCheck + "' not found.");
            return;
        }

        objectValue = debuggeeClass.getValue(field);
        if ( objectValue == null ) {
            complain("Field '" + objectToCheck + "' not initialized.");
            return;
        }

        // geting of array of primitive types
        field = debuggeeClass.fieldByName(arrPrimitives);
        if ( field == null ) {
            complain("Field '" + arrPrimitives + "' not found.");
            return;
        }
        Value arrValue = debuggeeClass.getValue(field);
        if ( arrValue == null || !(arrValue instanceof ArrayReference) ) {
            complain("Field '" + arrValue + "' is wrong.");
            return;
        }
        ArrayReference primitiveValues = (ArrayReference)arrValue;

        fieldList = ((ClassType )objectValue.type()).allFields();

        Value v1, currentValue;
        FloatValue value;
        Field fldOtherType;

        exitStatus = Consts.TEST_PASSED;

        // comparing loop: every FloatValue field of the tested object is
        // compared against every value named in the debuggee's array.
        for (int i = 0; i < fieldList.size(); i++ ) {
            field = (Field )fieldList.get(i);
            v1 = ((ObjectReference )objectValue).getValue(field);
            if ( !(v1 instanceof FloatValue) ) {
                exitStatus = Consts.TEST_FAILED;
                continue;
            }
            value = (FloatValue )v1;

            // comparing with debuggee's fields
            display("Checking compateTo(Object object) method for FloatValue: " + value);
            for (int j = 0; j < primitiveValues.length(); j++) {
                arrValue = primitiveValues.getValue(j);

                // Each array element is the *name* of a debuggee field.
                fldOtherType = debuggeeClass.fieldByName(((StringReference )arrValue).value());
                if ( fldOtherType == null ) {
                    complain("Field '" + arrValue + "' not found.");
                    exitStatus = Consts.TEST_FAILED;
                    continue;
                }

                currentValue = debuggeeClass.getValue(fldOtherType);

                if ( !PerformComparing(value, currentValue) )
                    exitStatus = Consts.TEST_FAILED;
            }
        }

    }

    //--------------------------------------------------------- test specific methods

    /**
     * Checks the Comparable contract of value.compareTo(object):
     * reflexivity, consistency with equals, symmetry of the zero case,
     * anticommutativity, transitivity, NullPointerException for null and
     * ClassCastException for a non-FloatValue argument.
     *
     * @return true when every applicable assertion holds
     */
    private static boolean PerformComparing(FloatValue value, Object object ) {
        boolean result = true;

        // assertion [ x.compareTo(x) == 0 ]
        if (value.compareTo(value) != 0) {
            complain("Failed assertion [ x.compareTo(x) == 0 ] for value: " + value.toString());
            result = false;
        }

        if (object instanceof FloatValue) {
            FloatValue floatObject = (FloatValue)object;
            try {
                // assertion [ x.compareTo(y) == 0 <==> x.equals(y) ]
                if ( ((value.equals(object)) && (value.compareTo(floatObject) != 0)) ||
                     (!(value.equals(object)) && (value.compareTo(floatObject) == 0)) ) {
                    complain("Failed assertion [ (x.compareTo(y) == 0) is identical to (x.equals(y) == true) ] \n\t" +
                        "where 'x' is FloatValue: " + value + " and 'y' is FloatValue : " + floatObject + " \n\t" +
                        "result of (x.compareTo(y)): " + value.compareTo(floatObject) + "\n\t" +
                        "result of (x.equals(y)): " + value.equals(object) );
                    result = false;
                }

                // assertion [ x.compareTo(y) == 0 <==> y.compareTo(x) == 0 ]
                if ( ((value.compareTo(floatObject) == 0) && (floatObject.compareTo(value) != 0)) ||
                     ((value.compareTo(floatObject) != 0) && (floatObject.compareTo(value) == 0)) ) {
                    complain("Failed assertion [ (x.compareTo(y) == 0) is identical to (y.compareTo(x) == 0) ] \n\t" +
                        "where 'x' is FloatValue: " + value + " and 'y' is FloatValue : " + floatObject + " \n\t" +
                        "result of (x.compareTo(y)): " + value.compareTo(floatObject) + "\n\t" +
                        "result of (y.compareTo(x)): " + floatObject.compareTo(value) );
                    result = false;
                }

                if (value.compareTo(floatObject) != 0) {
                    // assertion [ if (x.compareTo(y) == i) then (y.compareTo(x) == -i) ]
                    if (value.compareTo(floatObject) != -(floatObject.compareTo(value))) {
                        complain("Failed assertion [ if (x.compareTo(y) == i) then (y.compareTo(x) == -i) ] \n\t" +
                            "where 'x' is FloatValue: " + value + " and 'y' is FloatValue : " + floatObject + " \n\t" +
                            "result of (x.compareTo(y)): " + value.compareTo(floatObject) + "\n\t" +
                            "result of (y.compareTo(x)): " + floatObject.compareTo(value) );
                        result = false;
                    }
                }

                // assertion [ if (x.compareTo(y) > 0) and (y.compareTo(z) > 0), then (x.compareTo(z) > 0) ]
                if (value.compareTo(floatObject) > 0) {
                    FloatValue lessValue = FindLessFloatValue(floatObject);
                    if (lessValue != null) {
                        if (value.compareTo(lessValue) <= 0) {
                            complain("Failed assertion [ if (x.compareTo(y) > 0) and (y.compareTo(z) > 0), then (x.compareTo(z) > 0) ] \n\t" +
                                "where 'x' is FloatValue: " + value + " , 'y' is FloatValue : " + floatObject + " , 'z' is FloatValue : " + lessValue + " \n\t" +
                                "result of (x.compareTo(y)): " + value.compareTo(floatObject) + "\n\t" +
                                "result of (y.compareTo(z)): " + floatObject.compareTo(lessValue) + "\n\t" +
                                "result of (x.compareTo(z)): " + value.compareTo(lessValue) );
                            result = false;
                        }
                    }
                }
            } catch (Exception e) {
                complain("Caught unexpected " + e + " when comparing \n\t" +
                    "FloatValue: " + value + " and FloatValue argument: " + object);
                result = false;
            }

        } else if (object == null) {
            // Contract: comparing with null must throw NullPointerException.
            try {
                value.compareTo(null);
                complain("Does not throw expected NullPointerException when comparing \n\t" +
                    "FloatValue: " + value + " and null argument");
                result = false;
            } catch (NullPointerException ne) {
                // continue
            } catch (Exception e) {
                complain("Caught unexpected " + e + " when comparing \n\t" +
                    "FloatValue: " + value + " and null argument");
                result = false;
            }
        } else {
            // Contract: a non-FloatValue argument must raise ClassCastException
            // (triggered here by the explicit cast).
            try {
                value.compareTo((FloatValue)object);
                complain("Does not throw expected ClassCastException when comparing \n\t" +
                    "FloatValue: " + value + " and argument: " + object);
                result = false;
            } catch (ClassCastException ne) {
                // continue
            } catch (Exception e) {
                complain("Caught unexpected " + e + " when comparing \n\t" +
                    "FloatValue: " + value + " and argument: " + object);
                result = false;
            }
        }

        return result;
    }

    /**
     * This function searches the static <i>fieldList</i> - the list of mirrored
     * fields of debuggee's <i>compareto001aClassToCheck</i> class. Search is aimed
     * to find another FloatValue field which is less then method's argument via
     * <i>compareTo</i> method.
     */
    private static FloatValue FindLessFloatValue (FloatValue currentValue) {
        FloatValue result = null;

        for (int i = 0; i < fieldList.size(); i++ ) {
            Field field = (Field )fieldList.get(i);

            FloatValue newValue = (FloatValue)((ObjectReference )objectValue).getValue(field);

            if (currentValue.compareTo(newValue) > 0) {
                result = newValue;
                break;
            }
        }
        return result;
    }
}

//--------------------------------------------------------- test specific classes
gpl-2.0
fjbatresv/REMIS
vendor/cedriclombardot/admingenerator-generator-bundle/Admingenerator/GeneratorBundle/Builder/Admin/ListBuilderAction.php
336
<?php

namespace Admingenerator\GeneratorBundle\Builder\Admin;

/**
 * Generates the PHP controller handling list actions for the admin generator.
 *
 * @author cedric Lombardot
 */
class ListBuilderAction extends ListBuilder
{
    /**
     * Path of the generated list controller, relative to the build directory.
     *
     * @return string
     */
    public function getOutputName()
    {
        $controllerFolder = $this->getGenerator()->getGeneratedControllerFolder();

        return $controllerFolder.'/ListController.php';
    }
}
gpl-2.0
12sm/erinmyers
wp-content/plugins/installer/tables/wprc-plugin-install-list-table.php
18982
<?php class WPRC_PluginInstall_List_Table extends WP_Plugin_Install_List_Table { private $repositories_tabs = ''; private $results_per_repo; private function get_current_tab() { return $_GET['tab']; } public function display() { $protocol = strpos(strtolower( $_SERVER['SERVER_PROTOCOL'] ),'https') === FALSE ? 'http://' : 'https://'; $host = $_SERVER['HTTP_HOST']; $uri = $_SERVER['REQUEST_URI']; $current_url = $protocol . $host . $uri; if ( isset( $this -> repositories_tabs ) && is_array( $this -> repositories_tabs ) && count( $this -> repositories_tabs ) > 0 ) { if ( ! isset( $_GET['repo-tab'] ) ) $active_repo = $this -> repositories_tabs[0]['id']; else $active_repo = (int)$_GET['repo-tab']; echo '<div class="clear"></div>'; echo '<h2 class="nav-tab-wrapper">'; foreach ( $this -> repositories_tabs as $repo_data ) { $results = (int)$this -> results_per_repo[ $repo_data['id'] ]['results']; $class_active = ( $active_repo == $repo_data['id'] ) ? 'nav-tab-active' : ''; $repo_tab_url = add_query_arg( 'repo-tab', $repo_data['id'], $current_url ); $repo_tab_url = remove_query_arg( 'paged', $repo_tab_url ); echo '<a href="' . $repo_tab_url . '" class="nav-tab ' . $class_active . '">' . $repo_data['name'] . ' (' . $results . ')</a>'; } echo '</h2>'; } parent::display(); } public function prepare_items() { $tab = $this->get_current_tab(); // replace search results tab only if($tab<>'search') { parent::prepare_items(); exit; } include_once( ABSPATH . 
'wp-admin/includes/plugin-install.php' ); global $tabs, $tab, $paged, $type, $term; wp_reset_vars( array( 'tab' ) ); $paged = $this->get_pagenum(); $per_page = WPRC_PLUGINS_API_QUERY_PLUGINS_PER_PAGE; // These are the tabs which are shown on the page $tabs = array(); $tabs['dashboard'] = __( 'Search', 'installer' ); if ( 'search' == $tab ) $tabs['search'] = __( 'Search Results', 'installer' ); $tabs['upload'] = __( 'Upload', 'installer' ); $tabs['featured'] = _x( 'Featured','Plugin Installer', 'installer' ); $tabs['popular'] = _x( 'Popular','Plugin Installer', 'installer' ); $tabs['new'] = _x( 'Newest','Plugin Installer', 'installer' ); $tabs['updated'] = _x( 'Recently Updated','Plugin Installer', 'installer' ); $nonmenu_tabs = array( 'plugin-information' ); //Valid actions to perform which do not have a Menu item. $tabs = apply_filters( 'install_plugins_tabs', $tabs ); $nonmenu_tabs = apply_filters( 'install_plugins_nonmenu_tabs', $nonmenu_tabs ); // If a non-valid menu tab has been selected, And its not a non-menu action. if ( empty( $tab ) || ( !isset( $tabs[ $tab ] ) && !in_array( $tab, (array) $nonmenu_tabs ) ) ) $tab = key( $tabs ); $args = array( 'page' => $paged, 'per_page' => $per_page ); switch ( $tab ) { case 'search': $type = isset( $_REQUEST['type'] ) ? stripslashes( $_REQUEST['type'] ) : ''; $term = isset( $_REQUEST['s'] ) ? stripslashes( $_REQUEST['s'] ) : ''; switch ( $type ) { case 'tag': $args['tag'] = sanitize_title_with_dashes( $term ); break; case 'author': $args['author'] = $term; break; case 'term': // if type is missing (wp34) default to search term default: $args['search'] = $term; break; } add_action( 'install_plugins_table_header', 'install_search_form' ); break; case 'featured': case 'popular': case 'new': case 'updated': $args['browse'] = $tab; break; default: $args = false; } if ( !$args ) return; $api = plugins_api( 'query_plugins', $args ); if ( is_wp_error( $api ) ) wp_die( $api->get_error_message() . 
'</p> <p class="hide-if-no-js"><a href="#" onclick="document.location.reload(); return false;">' . __( 'Try again', 'installer' ) . '</a>' ); $repo_model = WPRC_Loader::getModel('repositories'); if ( isset( $_GET['repos'] ) ) { $repos = $_GET['repos']; } else { $rm = WPRC_Loader::getModel('repositories'); $repos = $rm -> getRepositoriesIds('enabled_repositories','plugins'); } // Filtering by repository if is the case... $this -> repositories_tabs = array(); // Do we need tabs? $this -> results_per_repo = $api -> results_per_repo; $repo_results_gt_zero = 0; foreach ( $this -> results_per_repo as $repo_results ) { if ( $repo_results['results'] > 0 ) $repo_results_gt_zero++; } if ( $api -> info['results'] > WPRC_PLUGINS_API_QUERY_PLUGINS_PER_PAGE && count( $repos ) > 1 && ( $repo_results_gt_zero > 1 ) ) { // We have too many results => we have to tab the results // Ordering repos by ID so Wordpress.org plugins wil appear most of the time at first place sort($repos); $tmp = array(); foreach ( $repos as $repo_id ) { if ( isset( $this -> results_per_repo[ $repo_id ] ) && $this -> results_per_repo[ $repo_id ]['results'] == 0 ) continue; // We need the name of the repo $repo_info = $repo_model -> getRepositoryByField( 'id', $repo_id ); if ( $repo_info ) { $this -> repositories_tabs[] = array( 'id' => $repo_info -> id, 'name' => $repo_info -> repository_name ); } } $filtered_api = new stdClass; $filtered_api -> info['results'] = $api -> info['results']; $filtered_api -> info['page'] = $api -> info['page']; $filtered_api -> plugins = array(); // If we are currently on a tab, we'll show only those results if ( is_array( $this -> repositories_tabs ) && count( $this -> repositories_tabs ) > 0 ) $current_repo = ( isset( $_GET['repo-tab'] ) ) ? 
$_GET['repo-tab'] : $this -> repositories_tabs[0]['id']; foreach ( $api -> plugins as $plugin ) { if ( $plugin -> repository_id == $current_repo ) { $filtered_api -> plugins[] = $plugin; } else { $filtered_api -> info['results']--; } } $filtered_api -> info['results'] = $this -> results_per_repo[ $current_repo ]['results']; $filtered_api -> info['total_pages'] = (int)ceil( $filtered_api -> info['results'] / WPRC_PLUGINS_API_QUERY_PLUGINS_PER_PAGE ); } else { $filtered_api = $api; } $this->items = $filtered_api->plugins; $this->set_pagination_args( array( 'total_items' => $filtered_api->info['results'], 'per_page' => $per_page, ) ); } function no_items() { _e( 'No plugins match your request.','installer' ); } function get_columns() { $columns = parent::get_columns(); $columns['source'] = __('Source', 'installer'); $columns['price'] = __('Price', 'installer'); return $columns; } public function display_rows() { $plugins_allowedtags = array( 'a' => array( 'href' => array(),'title' => array(), 'target' => array() ), 'abbr' => array( 'title' => array() ),'acronym' => array( 'title' => array() ), 'code' => array(), 'pre' => array(), 'em' => array(),'strong' => array(), 'ul' => array(), 'ol' => array(), 'li' => array(), 'p' => array(), 'br' => array() ); list( $columns, $hidden ) = $this->get_column_info(); $style = array(); foreach ( $columns as $column_name => $column_display_name ) { $style[ $column_name ] = in_array( $column_name, $hidden ) ? 'style="display:none;"' : ''; } $nonce_login = wp_create_nonce('installer-login-link'); foreach ( (array) $this->items as $plugin ) { //echo '<pre>'; print_r($plugin); echo '</pre>'; if ( is_object( $plugin ) ) $plugin = (array) $plugin; $title = wp_kses( $plugin['name'], $plugins_allowedtags ); //Limit description to 400char, and remove any HTML. $description = strip_tags( $plugin['description'] ); if ( strlen( $description ) > 400 ) $description = mb_substr( $description, 0, 400 ) . 
'&#8230;'; //remove any trailing entities $description = preg_replace( '/&[^;\s]{0,6}$/', '', $description ); //strip leading/trailing & multiple consecutive lines $description = trim( $description ); $description = preg_replace( "|(\r?\n)+|", "\n", $description ); //\n => <br> $description = nl2br( $description ); $version = wp_kses( $plugin['version'], $plugins_allowedtags ); $name = strip_tags( $title . ' ' . $version ); $author = $plugin['author']; if ( ! empty( $plugin['author'] ) ) $author = ' <cite>' . sprintf( __( 'By %s', 'installer' ), $author ) . '.</cite>'; $author = wp_kses( $author, $plugins_allowedtags ); $action_links = array(); $action_links[] = '<a href="' . self_admin_url( 'plugin-install.php?tab=plugin-information&amp;repository_id='. $plugin['repository']->id .'&amp;plugin=' . $plugin['slug'] . '&amp;TB_iframe=true&amp;width=600&amp;height=550' ) . '" class="thickbox" title="' . esc_attr( sprintf( __( 'More information about %s', 'installer' ), $name ) ) . '">' . __( 'Details', 'installer' ) . '</a>'; // set price $no_price_value = __('Free', 'installer'); $plugin_price = $no_price_value; if(array_key_exists('price', $plugin)) { $plugin_price = ($plugin['price']<>0 && isset($plugin['price'])) ? $plugin['currency']->symbol.$plugin['price'].' 
('.$plugin['currency']->name.')' : $no_price_value; } $plugin_source = ''; if(array_key_exists('repository', $plugin)) { $plugin_source = $plugin['repository']->repository_name; } if ( current_user_can( 'install_plugins' ) || current_user_can( 'update_plugins' ) ) { //$plugin['purchase_url'] = 'http://wpml.org/shop/checkout/?buy_now=2'; // DEBUG WPRC_Loader::includeListTable('wprc-plugin-information'); $status = WPRC_PluginInformation::wprc_install_plugin_install_status( $plugin ); //$action_links[]=$status['status']; if ($status['status']!='latest_installed' && $status['status']!='newer_installed') { if(isset($plugin['purchase_link']) && !empty($plugin['purchase_link']) && ($plugin_price != $no_price_value)) { $purl=WPRC_Functions::sanitizeURL($plugin['purchase_link']); $return_url=rawurlencode(admin_url( 'plugin-install.php?tab=plugin-information&repository_id='. $plugin['repository']->id .'&plugin=' . $plugin['slug'])); $salt=rawurlencode($plugin['salt']); if (strpos($purl,'?')) $url_glue='&'; else $url_glue='?'; $purl.=$url_glue.'return_to='.$return_url.'&rsalt='.$salt; $status = array( 'status' => 'paid', 'url' => $purl, 'version' => $plugin['version'] ); } /*else { WPRC_Loader::includeListTable('wprc-plugin-information'); $status = WPRC_PluginInformation::wprc_install_plugin_install_status( $plugin ); }*/ } $url_glue = false === strpos($status['url'], '?') ? '?' : '&'; $status['url'] .= $url_glue . 'repository_id='. $plugin['repository']->id; $showedmessage=false; switch ( $status['status'] ) { case 'install': if ( $status['url'] ) $action_links[] = '<a class="install-now" href="' . $status['url'] . '" title="' . esc_attr( sprintf( __( 'Install %s', 'installer' ), $name ) ) . '">' . __( 'Install Now', 'installer' ) . '</a>'; break; case 'update_available': if ( $status['url'] ) $action_links[] = '<a href="' . $status['url'] . '" title="' . esc_attr( sprintf( __( 'Update to version %s', 'installer' ), $status['version'] ) ) . '">' . 
sprintf( __( 'Update Now', 'installer' ), $status['version'] ) . '</a>'; break; case 'paid': //$action_links[] = '<a href="' . $status['url'] . '" class="thickbox">' . __('Buy' , 'installer') . ' (' . $plugin['currency'] . $plugin['price'].')</a>'; if (isset($plugin['message']) && !empty($plugin['message'])) { $action_links[] = WPRC_Functions::formatMessage($plugin['message']); $showedmessage=true; } else { //$action_links[] = '<a href=" ' . admin_url('admin.php?wprc_c=repository-login&wprc_action=RepositoryLogin&repository_id=' . $plugin['repository']->id) . '&buyurl='.rawurlencode($status['url']).'" class="thickbox" title="' . __('Buy', 'installer') . '">' . __('Buy ' , 'installer') . ' (' . $plugin['currency'] . $plugin['price'].')</a>'; //$action_links[] = '<a href=" ' . $status['url'] . '" class="thickbox" title="' . __('Buy', 'installer') . '">' . __('Buy' , 'installer') . ' (' . $plugin['currency'] . $plugin['price'].')</a>'; $action_links[] = '<a href=" ' . $status['url'].'&TB_iframe=true'.'" class="thickbox" title="' . sprintf(__('Buy %s', 'installer'),$name) . '">' . sprintf(__('Buy %s' , 'installer'),'(' . $plugin['currency']->symbol . $plugin['price'].' '.$plugin['currency']->name.')') . '</a>'; } if(empty($plugin['repository']->repository_username) && empty($plugin['repository']->repository_password)){ $action_links[] = '<a href=" ' . admin_url('admin.php?wprc_c=repository-login&amp;wprc_action=RepositoryLogin&amp;repository_id=' . $plugin['repository']->id.'&amp;_wpnonce='.$nonce_login) . '" class="thickbox" title="' . __('Log in', 'installer') . '">' . __('Login' , 'installer') . '</a>'; } break; case 'latest_installed': case 'newer_installed': $action_links[] = '<span title="' . esc_attr__( 'This plugin is already installed and is up to date', 'installer' ) . ' ">' . __( 'Installed', 'installer' ) . 
'</span>'; break; } if (isset($plugin['message']) && !empty($plugin['message'])) { $message=WPRC_Functions::formatMessage($plugin['message']); if (isset($plugin['message_type']) && $plugin['message_type']=='notify') WPRC_AdminNotifier::addMessage('wprc-plugin-info-'.$plugin['slug'],$message); elseif (!$showedmessage) $action_links[]=$message; } } // add check compatibility link // $action_links[] = '<a href="' . self_admin_url( 'plugin-install.php?tab=plugin-information&amp;repository_id='. $plugin['repository']->id .'&amp;plugin=' . $plugin['slug'] . // '&amp;TB_iframe=true&amp;width=600&amp;height=550' ) . '" class="thickbox" title="' . // esc_attr( sprintf( __( 'Check compatibility of "%s" plugin with activated extensions', 'installer' ), $name ) ) . '">' . __( 'Check compatibility', 'installer' ) . '</a>'; $slug = ( isset( $plugin['slug'] ) ) ? '&amp;extension_slug=' . $plugin['slug'] : ''; $action_links[] = '<a href="' . self_admin_url( 'admin.php?wprc_c=repository-reporter&amp;wprc_action=checkCompatibility&amp;repository_id='. $plugin['repository']->id .'&amp;repository_url='.$plugin['repository']->repository_endpoint_url.'&amp;extension_name=' . $plugin['name'] . '&amp;extension_version=' . $plugin['version'] . $slug . '&amp;extension_type_singular=plugin&amp;extension_type=plugins&amp;TB_iframe=true&amp;width=300&amp;height=400' ) . '" class="thickbox" title="' . esc_attr( sprintf( __( 'Check compatibility status for "%s" plugin', 'installer' ), $name ) ) . '">' . __( 'Check compatibility', 'installer' ) . 
'</a>'; $action_links = apply_filters( 'plugin_install_action_links', $action_links, $plugin ); if (!isset($plugin['num_ratings']) || empty($plugin['num_ratings'])) $plugin['num_ratings']=0; if (!isset($plugin['rating']) || empty($plugin['rating'])) $plugin['rating']=0; ?> <tr> <td class="name column-name"<?php echo $style['name']; ?>><strong><?php echo $title; ?></strong> <div class="action-links"><?php if ( !empty( $action_links ) ) echo implode( ' | ', $action_links ); ?></div> </td> <td class="vers column-version"<?php echo $style['version']; ?>><?php echo $version; ?></td> <td class="vers column-rating"<?php echo $style['rating']; ?>> <?php global $wp_version; if (version_compare($wp_version, "3.4", "<")) { ?> <div class="star-holder" title="<?php printf( _n( '(based on %s rating)', '(based on %s ratings)', $plugin['num_ratings'], 'installer' ), number_format_i18n( intval($plugin['num_ratings']) ) ) ?>"> <div class="star star-rating" style="width: <?php echo esc_attr( $plugin['rating'] ) ?>px"></div> <?php $color = get_user_option('admin_color'); if ( empty($color) || 'fresh' == $color ) $star_url = admin_url( 'images/gray-star.png?v=20110615' ); // 'Fresh' Gray star for list tables else $star_url = admin_url( 'images/star.png?v=20110615' ); // 'Classic' Blue star ?> <div class="star star5"><img src="<?php echo $star_url; ?>" alt="<?php esc_attr_e( '5 stars' ) ?>" /></div> <div class="star star4"><img src="<?php echo $star_url; ?>" alt="<?php esc_attr_e( '4 stars' ) ?>" /></div> <div class="star star3"><img src="<?php echo $star_url; ?>" alt="<?php esc_attr_e( '3 stars' ) ?>" /></div> <div class="star star2"><img src="<?php echo $star_url; ?>" alt="<?php esc_attr_e( '2 stars' ) ?>" /></div> <div class="star star1"><img src="<?php echo $star_url; ?>" alt="<?php esc_attr_e( '1 star' ) ?>" /></div> </div> <?php } else { ?> <div class="star-holder" title="<?php printf( _n( '(based on %s rating)', '(based on %s ratings)', $plugin['num_ratings'], 'installer' ), 
number_format_i18n( intval($plugin['num_ratings']) ) ) ?>"> <div class="star star-rating" style="width: <?php echo esc_attr( str_replace( ',', '.', $plugin['rating'] ) ); ?>px"></div> </div> <?php } ?> </td> <td class="desc column-description"<?php echo $style['description']; ?>><?php echo $description, $author; ?></td> <td class="source column-source" align="left"><?php echo $plugin_source; ?></td> <td class="price column-price" align="left"><?php echo $plugin_price; ?></td> </tr> <?php } } } ?>
gpl-2.0
Yberion/stats_mod
code/rd-vanilla/tr_surface.cpp
63058
/* =========================================================================== Copyright (C) 1999 - 2005, Id Software, Inc. Copyright (C) 2000 - 2013, Raven Software, Inc. Copyright (C) 2001 - 2013, Activision, Inc. Copyright (C) 2013 - 2015, OpenJK contributors This file is part of the OpenJK source code. OpenJK is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see <http://www.gnu.org/licenses/>. =========================================================================== */ // tr_surf.c #include "../server/exe_headers.h" #include "tr_local.h" /* THIS ENTIRE FILE IS BACK END backEnd.currentEntity will be valid. Tess_Begin has already been called for the surface's shader. The modelview matrix will be set. It is safe to actually issue drawing commands here if you don't want to use the shader system. 
*/ //============================================================================ /* ============== RB_CheckOverflow ============== */ void RB_CheckOverflow( int verts, int indexes ) { if (tess.numVertexes + verts < SHADER_MAX_VERTEXES && tess.numIndexes + indexes < SHADER_MAX_INDEXES) { return; } RB_EndSurface(); if ( verts >= SHADER_MAX_VERTEXES ) { Com_Error(ERR_DROP, "RB_CheckOverflow: verts > MAX (%d > %d)", verts, SHADER_MAX_VERTEXES ); } if ( indexes >= SHADER_MAX_INDEXES ) { Com_Error(ERR_DROP, "RB_CheckOverflow: indices > MAX (%d > %d)", indexes, SHADER_MAX_INDEXES ); } RB_BeginSurface(tess.shader, tess.fogNum ); } /* ============== RB_AddQuadStampExt ============== */ void RB_AddQuadStampExt( vec3_t origin, vec3_t left, vec3_t up, byte *color, float s1, float t1, float s2, float t2 ) { vec3_t normal; int ndx; RB_CHECKOVERFLOW( 4, 6 ); ndx = tess.numVertexes; // triangle indexes for a simple quad tess.indexes[ tess.numIndexes ] = ndx; tess.indexes[ tess.numIndexes + 1 ] = ndx + 1; tess.indexes[ tess.numIndexes + 2 ] = ndx + 3; tess.indexes[ tess.numIndexes + 3 ] = ndx + 3; tess.indexes[ tess.numIndexes + 4 ] = ndx + 1; tess.indexes[ tess.numIndexes + 5 ] = ndx + 2; tess.xyz[ndx][0] = origin[0] + left[0] + up[0]; tess.xyz[ndx][1] = origin[1] + left[1] + up[1]; tess.xyz[ndx][2] = origin[2] + left[2] + up[2]; tess.xyz[ndx+1][0] = origin[0] - left[0] + up[0]; tess.xyz[ndx+1][1] = origin[1] - left[1] + up[1]; tess.xyz[ndx+1][2] = origin[2] - left[2] + up[2]; tess.xyz[ndx+2][0] = origin[0] - left[0] - up[0]; tess.xyz[ndx+2][1] = origin[1] - left[1] - up[1]; tess.xyz[ndx+2][2] = origin[2] - left[2] - up[2]; tess.xyz[ndx+3][0] = origin[0] + left[0] - up[0]; tess.xyz[ndx+3][1] = origin[1] + left[1] - up[1]; tess.xyz[ndx+3][2] = origin[2] + left[2] - up[2]; // constant normal all the way around VectorSubtract( vec3_origin, backEnd.viewParms.ori.axis[0], normal ); tess.normal[ndx][0] = tess.normal[ndx+1][0] = tess.normal[ndx+2][0] = tess.normal[ndx+3][0] = normal[0]; 
tess.normal[ndx][1] = tess.normal[ndx+1][1] = tess.normal[ndx+2][1] = tess.normal[ndx+3][1] = normal[1]; tess.normal[ndx][2] = tess.normal[ndx+1][2] = tess.normal[ndx+2][2] = tess.normal[ndx+3][2] = normal[2]; // standard square texture coordinates tess.texCoords[ndx][0][0] = tess.texCoords[ndx][1][0] = s1; tess.texCoords[ndx][0][1] = tess.texCoords[ndx][1][1] = t1; tess.texCoords[ndx+1][0][0] = tess.texCoords[ndx+1][1][0] = s2; tess.texCoords[ndx+1][0][1] = tess.texCoords[ndx+1][1][1] = t1; tess.texCoords[ndx+2][0][0] = tess.texCoords[ndx+2][1][0] = s2; tess.texCoords[ndx+2][0][1] = tess.texCoords[ndx+2][1][1] = t2; tess.texCoords[ndx+3][0][0] = tess.texCoords[ndx+3][1][0] = s1; tess.texCoords[ndx+3][0][1] = tess.texCoords[ndx+3][1][1] = t2; // constant color all the way around // should this be identity and let the shader specify from entity? byteAlias_t *baSource = (byteAlias_t *)color, *baDest; baDest = (byteAlias_t *)&tess.vertexColors[ndx + 0]; baDest->ui = baSource->ui; baDest = (byteAlias_t *)&tess.vertexColors[ndx + 1]; baDest->ui = baSource->ui; baDest = (byteAlias_t *)&tess.vertexColors[ndx + 2]; baDest->ui = baSource->ui; baDest = (byteAlias_t *)&tess.vertexColors[ndx + 3]; baDest->ui = baSource->ui; tess.numVertexes += 4; tess.numIndexes += 6; } /* ============== RB_AddQuadStamp ============== */ void RB_AddQuadStamp( vec3_t origin, vec3_t left, vec3_t up, byte *color ) { RB_AddQuadStampExt( origin, left, up, color, 0, 0, 1, 1 ); } /* ============== RB_SurfaceSprite ============== */ static void RB_SurfaceSprite( void ) { vec3_t left, up; float radius; // calculate the xyz locations for the four corners radius = backEnd.currentEntity->e.radius; if ( backEnd.currentEntity->e.rotation == 0 ) { VectorScale( backEnd.viewParms.ori.axis[1], radius, left ); VectorScale( backEnd.viewParms.ori.axis[2], radius, up ); } else { float s, c; float ang; ang = M_PI * backEnd.currentEntity->e.rotation / 180; s = sin( ang ); c = cos( ang ); VectorScale( 
backEnd.viewParms.ori.axis[1], c * radius, left ); VectorMA( left, -s * radius, backEnd.viewParms.ori.axis[2], left ); VectorScale( backEnd.viewParms.ori.axis[2], c * radius, up ); VectorMA( up, s * radius, backEnd.viewParms.ori.axis[1], up ); } if ( backEnd.viewParms.isMirror ) { VectorSubtract( vec3_origin, left, left ); } RB_AddQuadStamp( backEnd.currentEntity->e.origin, left, up, backEnd.currentEntity->e.shaderRGBA ); } /* ======================= RB_SurfaceOrientedQuad ======================= */ static void RB_SurfaceOrientedQuad( void ) { vec3_t left, up; float radius; // calculate the xyz locations for the four corners radius = backEnd.currentEntity->e.radius; MakeNormalVectors( backEnd.currentEntity->e.axis[0], left, up ); if ( backEnd.currentEntity->e.rotation == 0 ) { VectorScale( left, radius, left ); VectorScale( up, radius, up ); } else { vec3_t tempLeft, tempUp; float s, c; float ang; ang = M_PI * backEnd.currentEntity->e.rotation / 180; s = sin( ang ); c = cos( ang ); // Use a temp so we don't trash the values we'll need later VectorScale( left, c * radius, tempLeft ); VectorMA( tempLeft, -s * radius, up, tempLeft ); VectorScale( up, c * radius, tempUp ); VectorMA( tempUp, s * radius, left, up ); // no need to use the temp anymore, so copy into the dest vector ( up ) // This was copied for safekeeping, we're done, so we can move it back to left VectorCopy( tempLeft, left ); } if ( backEnd.viewParms.isMirror ) { VectorSubtract( vec3_origin, left, left ); } RB_AddQuadStamp( backEnd.currentEntity->e.origin, left, up, backEnd.currentEntity->e.shaderRGBA ); } /* ============== RB_SurfaceLine ============== */ // // Values for a proper line render primitive... // Width // STScale (how many times to loop a texture) // alpha // RGB // // Values for proper line object... 
// lifetime // dscale // startalpha, endalpha // startRGB, endRGB // static void DoLine( const vec3_t start, const vec3_t end, const vec3_t up, float spanWidth ) { float spanWidth2; int vbase; RB_CHECKOVERFLOW( 4, 6 ); vbase = tess.numVertexes; spanWidth2 = -spanWidth; VectorMA( start, spanWidth, up, tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = 0; tess.texCoords[tess.numVertexes][0][1] = 0; tess.vertexColors[tess.numVertexes][0] = backEnd.currentEntity->e.shaderRGBA[0];// * 0.25;//wtf??not sure why the code would be doing this tess.vertexColors[tess.numVertexes][1] = backEnd.currentEntity->e.shaderRGBA[1];// * 0.25; tess.vertexColors[tess.numVertexes][2] = backEnd.currentEntity->e.shaderRGBA[2];// * 0.25; tess.vertexColors[tess.numVertexes][3] = backEnd.currentEntity->e.shaderRGBA[3];// * 0.25; tess.numVertexes++; VectorMA( start, spanWidth2, up, tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = 1;//backEnd.currentEntity->e.shaderTexCoord[0]; tess.texCoords[tess.numVertexes][0][1] = 0; tess.vertexColors[tess.numVertexes][0] = backEnd.currentEntity->e.shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = backEnd.currentEntity->e.shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = backEnd.currentEntity->e.shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = backEnd.currentEntity->e.shaderRGBA[3]; tess.numVertexes++; VectorMA( end, spanWidth, up, tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = 0; tess.texCoords[tess.numVertexes][0][1] = 1;//backEnd.currentEntity->e.shaderTexCoord[1]; tess.vertexColors[tess.numVertexes][0] = backEnd.currentEntity->e.shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = backEnd.currentEntity->e.shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = backEnd.currentEntity->e.shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = backEnd.currentEntity->e.shaderRGBA[3]; tess.numVertexes++; VectorMA( end, spanWidth2, up, tess.xyz[tess.numVertexes] ); 
tess.texCoords[tess.numVertexes][0][0] = 1;//backEnd.currentEntity->e.shaderTexCoord[0]; tess.texCoords[tess.numVertexes][0][1] = 1;//backEnd.currentEntity->e.shaderTexCoord[1]; tess.vertexColors[tess.numVertexes][0] = backEnd.currentEntity->e.shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = backEnd.currentEntity->e.shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = backEnd.currentEntity->e.shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = backEnd.currentEntity->e.shaderRGBA[3]; tess.numVertexes++; tess.indexes[tess.numIndexes++] = vbase; tess.indexes[tess.numIndexes++] = vbase + 1; tess.indexes[tess.numIndexes++] = vbase + 2; tess.indexes[tess.numIndexes++] = vbase + 2; tess.indexes[tess.numIndexes++] = vbase + 1; tess.indexes[tess.numIndexes++] = vbase + 3; } static void DoLine2( const vec3_t start, const vec3_t end, const vec3_t up, float spanWidth, float spanWidth2, const float tcStart, const float tcEnd ) { int vbase; RB_CHECKOVERFLOW( 4, 6 ); vbase = tess.numVertexes; VectorMA( start, spanWidth, up, tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = 0; tess.texCoords[tess.numVertexes][0][1] = tcStart; tess.vertexColors[tess.numVertexes][0] = backEnd.currentEntity->e.shaderRGBA[0];// * 0.25;//wtf??not sure why the code would be doing this tess.vertexColors[tess.numVertexes][1] = backEnd.currentEntity->e.shaderRGBA[1];// * 0.25; tess.vertexColors[tess.numVertexes][2] = backEnd.currentEntity->e.shaderRGBA[2];// * 0.25; tess.vertexColors[tess.numVertexes][3] = backEnd.currentEntity->e.shaderRGBA[3];// * 0.25; tess.numVertexes++; VectorMA( start, -spanWidth, up, tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = 1;//backEnd.currentEntity->e.shaderTexCoord[0]; tess.texCoords[tess.numVertexes][0][1] = tcStart; tess.vertexColors[tess.numVertexes][0] = backEnd.currentEntity->e.shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = backEnd.currentEntity->e.shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = 
backEnd.currentEntity->e.shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = backEnd.currentEntity->e.shaderRGBA[3]; tess.numVertexes++; VectorMA( end, spanWidth2, up, tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = 0; tess.texCoords[tess.numVertexes][0][1] = tcEnd;//backEnd.currentEntity->e.shaderTexCoord[1]; tess.vertexColors[tess.numVertexes][0] = backEnd.currentEntity->e.shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = backEnd.currentEntity->e.shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = backEnd.currentEntity->e.shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = backEnd.currentEntity->e.shaderRGBA[3]; tess.numVertexes++; VectorMA( end, -spanWidth2, up, tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = 1;//backEnd.currentEntity->e.shaderTexCoord[0]; tess.texCoords[tess.numVertexes][0][1] = tcEnd;//backEnd.currentEntity->e.shaderTexCoord[1]; tess.vertexColors[tess.numVertexes][0] = backEnd.currentEntity->e.shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = backEnd.currentEntity->e.shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = backEnd.currentEntity->e.shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = backEnd.currentEntity->e.shaderRGBA[3]; tess.numVertexes++; tess.indexes[tess.numIndexes++] = vbase; tess.indexes[tess.numIndexes++] = vbase + 1; tess.indexes[tess.numIndexes++] = vbase + 2; tess.indexes[tess.numIndexes++] = vbase + 2; tess.indexes[tess.numIndexes++] = vbase + 1; tess.indexes[tess.numIndexes++] = vbase + 3; } //----------------- // RB_SurfaceLine //----------------- static void RB_SurfaceLine( void ) { refEntity_t *e; vec3_t right; vec3_t start, end; vec3_t v1, v2; e = &backEnd.currentEntity->e; VectorCopy( e->oldorigin, end ); VectorCopy( e->origin, start ); // compute side vector VectorSubtract( start, backEnd.viewParms.ori.origin, v1 ); VectorSubtract( end, backEnd.viewParms.ori.origin, v2 ); CrossProduct( v1, v2, right ); VectorNormalize( right ); DoLine( 
start, end, right, e->radius); } /* ============== RB_SurfaceCylinder ============== */ #define NUM_CYLINDER_SEGMENTS 40 // e->origin holds the bottom point // e->oldorigin holds the top point // e->radius holds the radius // If a cylinder has a tapered end that has a very small radius, the engine converts it to a cone. Not a huge savings, but the texture mapping is slightly better // and it uses half as many indicies as the cylinder version //------------------------------------- static void RB_SurfaceCone( void ) //------------------------------------- { static vec3_t points[NUM_CYLINDER_SEGMENTS]; vec3_t vr, vu, midpoint; vec3_t tapered, base; float detail, length; int i; int segments; refEntity_t *e; e = &backEnd.currentEntity->e; //Work out the detail level of this cylinder VectorAdd( e->origin, e->oldorigin, midpoint ); VectorScale(midpoint, 0.5, midpoint); // Average start and end VectorSubtract( midpoint, backEnd.viewParms.ori.origin, midpoint ); length = VectorNormalize( midpoint ); // this doesn't need to be perfect....just a rough compensation for zoom level is enough length *= (backEnd.viewParms.fovX / 90.0f); detail = 1 - ((float) length / 2048 ); segments = NUM_CYLINDER_SEGMENTS * detail; // 3 is the absolute minimum, but the pop between 3-8 is too noticeable if ( segments < 8 ) { segments = 8; } if ( segments > NUM_CYLINDER_SEGMENTS ) { segments = NUM_CYLINDER_SEGMENTS; } // Get the direction vector MakeNormalVectors( e->axis[0], vr, vu ); // we only need to rotate around the larger radius, the smaller radius get's welded if ( e->radius < e->backlerp ) { VectorScale( vu, e->backlerp, vu ); VectorCopy( e->origin, base ); VectorCopy( e->oldorigin, tapered ); } else { VectorScale( vu, e->radius, vu ); VectorCopy( e->origin, tapered ); VectorCopy( e->oldorigin, base ); } // Calculate the step around the cylinder detail = 360.0f / (float)segments; for ( i = 0; i < segments; i++ ) { // ring RotatePointAroundVector( points[i], e->axis[0], vu, detail * i ); 
VectorAdd( points[i], base, points[i] ); } // Calculate the texture coords so the texture can wrap around the whole cylinder detail = 1.0f / (float)segments; RB_CHECKOVERFLOW( 2 * (segments+1), 3 * segments ); // this isn't 100% accurate int vbase = tess.numVertexes; for ( i = 0; i < segments; i++ ) { VectorCopy( points[i], tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = detail * i; tess.texCoords[tess.numVertexes][0][1] = 1.0f; tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3]; tess.numVertexes++; // We could add this vert once, but using the given texture mapping method, we need to generate different texture coordinates VectorCopy( tapered, tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = detail * i + detail * 0.5f; // set the texture coordinates to the point half-way between the untapered ends....but on the other end of the texture tess.texCoords[tess.numVertexes][0][1] = 0.0f; tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3]; tess.numVertexes++; } // last point has the same verts as the first, but does not share the same tex coords, so we have to duplicate it VectorCopy( points[0], tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = detail * i; tess.texCoords[tess.numVertexes][0][1] = 1.0f; tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3]; tess.numVertexes++; VectorCopy( tapered, tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = 
detail * i + detail * 0.5f; tess.texCoords[tess.numVertexes][0][1] = 0.0f; tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3]; tess.numVertexes++; // do the welding for ( i = 0; i < segments; i++ ) { tess.indexes[tess.numIndexes++] = vbase; tess.indexes[tess.numIndexes++] = vbase + 1; tess.indexes[tess.numIndexes++] = vbase + 2; vbase += 2; } } //------------------------------------- static void RB_SurfaceCylinder( void ) //------------------------------------- { static vec3_t lower_points[NUM_CYLINDER_SEGMENTS], upper_points[NUM_CYLINDER_SEGMENTS]; vec3_t vr, vu, midpoint, v1; float detail, length; int i; int segments; refEntity_t *e; e = &backEnd.currentEntity->e; // check for tapering if ( !( e->radius < 0.3f && e->backlerp < 0.3f) && ( e->radius < 0.3f || e->backlerp < 0.3f )) { // One end is sufficiently tapered to consider changing it to a cone RB_SurfaceCone(); return; } //Work out the detail level of this cylinder VectorAdd( e->origin, e->oldorigin, midpoint ); VectorScale(midpoint, 0.5, midpoint); // Average start and end VectorSubtract( midpoint, backEnd.viewParms.ori.origin, midpoint ); length = VectorNormalize( midpoint ); // this doesn't need to be perfect....just a rough compensation for zoom level is enough length *= (backEnd.viewParms.fovX / 90.0f); detail = 1 - ((float) length / 2048 ); segments = NUM_CYLINDER_SEGMENTS * detail; // 3 is the absolute minimum, but the pop between 3-8 is too noticeable if ( segments < 8 ) { segments = 8; } if ( segments > NUM_CYLINDER_SEGMENTS ) { segments = NUM_CYLINDER_SEGMENTS; } //Get the direction vector MakeNormalVectors( e->axis[0], vr, vu ); VectorScale( vu, e->radius, v1 ); // size1 VectorScale( vu, e->backlerp, vu ); // size2 // Calculate the step around the cylinder detail = 360.0f / (float)segments; for ( i = 0; i < segments; 
i++ ) { //Upper ring RotatePointAroundVector( upper_points[i], e->axis[0], vu, detail * i ); VectorAdd( upper_points[i], e->origin, upper_points[i] ); //Lower ring RotatePointAroundVector( lower_points[i], e->axis[0], v1, detail * i ); VectorAdd( lower_points[i], e->oldorigin, lower_points[i] ); } // Calculate the texture coords so the texture can wrap around the whole cylinder detail = 1.0f / (float)segments; RB_CHECKOVERFLOW( 2 * (segments+1), 6 * segments ); // this isn't 100% accurate int vbase = tess.numVertexes; for ( i = 0; i < segments; i++ ) { VectorCopy( upper_points[i], tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = detail * i; tess.texCoords[tess.numVertexes][0][1] = 1.0f; tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3]; tess.numVertexes++; VectorCopy( lower_points[i], tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = detail * i; tess.texCoords[tess.numVertexes][0][1] = 0.0f; tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3]; tess.numVertexes++; } // last point has the same verts as the first, but does not share the same tex coords, so we have to duplicate it VectorCopy( upper_points[0], tess.xyz[tess.numVertexes] ); tess.texCoords[tess.numVertexes][0][0] = detail * i; tess.texCoords[tess.numVertexes][0][1] = 1.0f; tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3]; tess.numVertexes++; VectorCopy( lower_points[0], tess.xyz[tess.numVertexes] ); 
tess.texCoords[tess.numVertexes][0][0] = detail * i; tess.texCoords[tess.numVertexes][0][1] = 0.0f; tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0]; tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1]; tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2]; tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3]; tess.numVertexes++; // glue the verts for ( i = 0; i < segments; i++ ) { tess.indexes[tess.numIndexes++] = vbase; tess.indexes[tess.numIndexes++] = vbase + 1; tess.indexes[tess.numIndexes++] = vbase + 2; tess.indexes[tess.numIndexes++] = vbase + 2; tess.indexes[tess.numIndexes++] = vbase + 1; tess.indexes[tess.numIndexes++] = vbase + 3; vbase += 2; } } static vec3_t sh1, sh2; static int f_count; // Up front, we create a random "shape", then apply that to each line segment...and then again to each of those segments...kind of like a fractal //---------------------------------------------------------------------------- static void CreateShape() //---------------------------------------------------------------------------- { VectorSet( sh1, 0.66f,// + Q_flrand(-1.0f, 1.0f) * 0.1f, // fwd 0.08f + Q_flrand(-1.0f, 1.0f) * 0.02f, 0.08f + Q_flrand(-1.0f, 1.0f) * 0.02f ); // it seems to look best to have a point on one side of the ideal line, then the other point on the other side. 
VectorSet( sh2, 0.33f,// + Q_flrand(-1.0f, 1.0f) * 0.1f, // fwd -sh1[1] + Q_flrand(-1.0f, 1.0f) * 0.02f, // forcing point to be on the opposite side of the line -- right -sh1[2] + Q_flrand(-1.0f, 1.0f) * 0.02f );// up } //---------------------------------------------------------------------------- static void ApplyShape( vec3_t start, vec3_t end, vec3_t right, float sradius, float eradius, int count, float startPerc, float endPerc ) //---------------------------------------------------------------------------- { vec3_t point1, point2, fwd; vec3_t rt, up; float perc, dis; if ( count < 1 ) { // done recursing DoLine2( start, end, right, sradius, eradius, startPerc, endPerc ); return; } CreateShape(); VectorSubtract( end, start, fwd ); dis = VectorNormalize( fwd ) * 0.7f; MakeNormalVectors( fwd, rt, up ); perc = sh1[0]; VectorScale( start, perc, point1 ); VectorMA( point1, 1.0f - perc, end, point1 ); VectorMA( point1, dis * sh1[1], rt, point1 ); VectorMA( point1, dis * sh1[2], up, point1 ); // do a quick and dirty interpolation of the radius at that point float rads1, rads2; rads1 = sradius * 0.666f + eradius * 0.333f; rads2 = sradius * 0.333f + eradius * 0.666f; // recursion ApplyShape( start, point1, right, sradius, rads1, count - 1, startPerc, startPerc * 0.666f + endPerc * 0.333f ); perc = sh2[0]; VectorScale( start, perc, point2 ); VectorMA( point2, 1.0f - perc, end, point2 ); VectorMA( point2, dis * sh2[1], rt, point2 ); VectorMA( point2, dis * sh2[2], up, point2 ); // recursion ApplyShape( point2, point1, right, rads1, rads2, count - 1, startPerc * 0.333f + endPerc * 0.666f, startPerc * 0.666f + endPerc * 0.333f ); ApplyShape( point2, end, right, rads2, eradius, count - 1, startPerc * 0.333f + endPerc * 0.666f, endPerc ); } //---------------------------------------------------------------------------- static void DoBoltSeg( vec3_t start, vec3_t end, vec3_t right, float radius ) //---------------------------------------------------------------------------- { 
refEntity_t *e;
	vec3_t	fwd, old;
	vec3_t	cur, off={10,10,10};	// running offset from the ideal line; deliberately starts non-zero
	vec3_t	rt, up;
	vec3_t	temp;
	int		i;
	float	dis, oldPerc = 0.0f, perc, oldRadius, newRadius;

	e = &backEnd.currentEntity->e;

	VectorSubtract( end, start, fwd );
	dis = VectorNormalize( fwd );

	if (dis > 2000)	//freaky long
	{
//		ri.Printf( PRINT_WARNING, "DoBoltSeg: insane distance.\n" );
		dis = 2000;
	}
	MakeNormalVectors( fwd, rt, up );

	VectorCopy( start, old );

	newRadius = oldRadius = radius;

	// step along the bolt in fixed 16-unit increments
	for ( i = 16; i <= dis; i+= 16 )
	{
		// because of our large step size, we may not actually draw to the end.  In this case, fudge our percent so that we are basically complete
		if ( i + 16 > dis )
		{
			perc = 1.0f;
		}
		else
		{
			// percentage of the amount of line completed
			perc = (float)i / dis;
		}

		// create our level of deviation for this point
		// NOTE(review): e->frame is used as the PRNG seed for Q_crandom here — confirm callers expect it to be mutated
		VectorScale( fwd, Q_crandom(&e->frame) * 3.0f, temp );	// move less in fwd direction, chaos also does not affect this
		VectorMA( temp, Q_crandom(&e->frame) * 7.0f * e->angles[0], rt, temp );	// move more in direction perpendicular to line, angles is really the chaos
		VectorMA( temp, Q_crandom(&e->frame) * 7.0f * e->angles[0], up, temp );	// move more in direction perpendicular to line

		// track our total level of offset from the ideal line
		VectorAdd( off, temp, off );

		// Move from start to end, always adding our current level of offset from the ideal line
		//	Even though we are adding a random offset.....by nature, we always move from exactly start....to end
		VectorAdd( start, off, cur );
		VectorScale( cur, 1.0f - perc, cur );
		VectorMA( cur, perc, end, cur );

		if ( e->renderfx & RF_TAPERED )
		{
			// This does pretty close to perfect tapering since apply shape interpolates the old and new as it goes along.
			//	by using one minus the square, the radius stays fairly constant, then drops off quickly at the very point of the bolt
			oldRadius = radius * (1.0f-oldPerc*oldPerc);
			newRadius = radius * (1.0f-perc*perc);
		}

		// Apply the random shape to our line seg to give it some micro-detail-jaggy-coolness.
ApplyShape( cur, old, right, newRadius, oldRadius, 2 - r_lodbias->integer, 0, 1 );

		// randomly split off to create little tendrils, but don't do it too close to the end and especially if we are not even of the forked variety
		if (( e->renderfx & RF_FORKED ) && f_count > 0 && Q_random(&e->frame) > 0.93f && (1.0f - perc) > 0.8f )
		{
			vec3_t newDest;

			f_count--;

			// Pick a point somewhere between the current point and the final endpoint
			VectorAdd( cur, e->oldorigin, newDest );
			VectorScale( newDest, 0.5f, newDest );

			// And then add some crazy offset
			for ( int t = 0; t < 3; t++ )
			{
				newDest[t] += Q_crandom(&e->frame) * 80;
			}

			// we could branch off using OLD and NEWDEST, but that would allow multiple forks...whereas, we just want simpler brancing
			DoBoltSeg( cur, newDest, right, newRadius );
		}

		// Current point along the line becomes our new old attach point
		VectorCopy( cur, old );
		oldPerc = perc;
	}
}

// Entry point for RT_ELECTRICITY entities: builds a jagged bolt from
// e->origin towards e->oldorigin (optionally growing over time via RF_GROW).
//------------------------------------------
static void RB_SurfaceElectricity()
//------------------------------------------
{
	refEntity_t *e;
	vec3_t		right, fwd;
	vec3_t		start, end;
	vec3_t		v1, v2;
	float		radius, perc = 1.0f, dis;

	e = &backEnd.currentEntity->e;
	radius = e->radius;

	VectorCopy( e->origin, start );

	VectorSubtract( e->oldorigin, start, fwd );
	dis = VectorNormalize( fwd );

	// see if we should grow from start to end
	if ( e->renderfx & RF_GROW )
	{
		perc = 1.0f - ( e->endTime - tr.refdef.time ) / e->angles[1]/*duration*/;

		if ( perc > 1.0f )
		{
			perc = 1.0f;
		}
		else if ( perc < 0.0f )
		{
			perc = 0.0f;
		}
	}

	// NOTE(review): e->oldorigin is overwritten here with the grown endpoint
	VectorMA( start, perc * dis, fwd, e->oldorigin );
	VectorCopy( e->oldorigin, end );

	// compute side vector
	VectorSubtract( start, backEnd.viewParms.ori.origin, v1 );
	VectorSubtract( end, backEnd.viewParms.ori.origin, v2 );
	CrossProduct( v1, v2, right );
	VectorNormalize( right );

	// allow now more than three branches on branch type electricity
	f_count = 3;
	DoBoltSeg( start, end, right, radius );
}

/*
=============
RB_SurfacePolychain
=============
*/
/*
// we could try to do
something similar to this to get better normals into the tess for these types of surfs.  As it stands, any shader pass that
// requires a normal ( env map ) will not work properly since the normals seem to essentially be random garbage.
void RB_SurfacePolychain( srfPoly_t *p ) {
	int		i;
	int		numv;
	vec3_t	a,b,normal={1,0,0};

	RB_CHECKOVERFLOW( p->numVerts, 3*(p->numVerts - 2) );

	if ( p->numVerts >= 3 )
	{
		VectorSubtract( p->verts[0].xyz, p->verts[1].xyz, a );
		VectorSubtract( p->verts[2].xyz, p->verts[1].xyz, b );
		CrossProduct( a,b, normal );
		VectorNormalize( normal );
	}

	// fan triangles into the tess array
	numv = tess.numVertexes;
	for ( i = 0; i < p->numVerts; i++ ) {
		VectorCopy( p->verts[i].xyz, tess.xyz[numv] );
		tess.texCoords[numv][0][0] = p->verts[i].st[0];
		tess.texCoords[numv][0][1] = p->verts[i].st[1];
		VectorCopy( normal, tess.normal[numv] );
		*(int *)&tess.vertexColors[numv] = *(int *)p->verts[ i ].modulate;

		numv++;
	}

	// generate fan indexes into the tess array
	for ( i = 0; i < p->numVerts-2; i++ ) {
		tess.indexes[tess.numIndexes + 0] = tess.numVertexes;
		tess.indexes[tess.numIndexes + 1] = tess.numVertexes + i + 1;
		tess.indexes[tess.numIndexes + 2] = tess.numVertexes + i + 2;
		tess.numIndexes += 3;
	}

	tess.numVertexes = numv;
}
*/
// Tessellate an arbitrary polygon (srfPoly_t) into the tess buffers as a
// triangle fan rooted at vertex 0.  Vertex colors are copied via byteAlias_t
// to avoid strict-aliasing problems with the packed RGBA bytes.
void RB_SurfacePolychain( srfPoly_t *p ) {
	int		i;
	int		numv;

	RB_CHECKOVERFLOW( p->numVerts, 3*(p->numVerts - 2) );

	// fan triangles into the tess array
	numv = tess.numVertexes;
	for ( i = 0; i < p->numVerts; i++ ) {
		VectorCopy( p->verts[i].xyz, tess.xyz[numv] );
		tess.texCoords[numv][0][0] = p->verts[i].st[0];
		tess.texCoords[numv][0][1] = p->verts[i].st[1];
		byteAlias_t *baDest = (byteAlias_t *)&tess.vertexColors[numv++],
			*baSource = (byteAlias_t *)&p->verts[ i ].modulate;
		baDest->i = baSource->i;
	}

	// generate fan indexes into the tess array
	for ( i = 0; i < p->numVerts-2; i++ ) {
		tess.indexes[tess.numIndexes + 0] = tess.numVertexes;
		tess.indexes[tess.numIndexes + 1] = tess.numVertexes + i + 1;
		tess.indexes[tess.numIndexes + 2] = tess.numVertexes + i + 2;
		tess.numIndexes += 3;
	}

	tess.numVertexes = numv;
}

// Fold per-vertex colors together with up to MAXLIGHTMAPS light-style colors
// into one packed RGBA value.  Styles only apply when the shader is lit
// LIGHTMAP_BY_VERTEX; r_fullbright forces white RGB.
inline static uint32_t ComputeFinalVertexColor( const byte *colors ) {
	int k;
	byteAlias_t result;
	uint32_t r, g, b;

	for ( k=0; k<4; k++ )
		result.b[k] = colors[k];

	if ( tess.shader->lightmapIndex[0] != LIGHTMAP_BY_VERTEX )
		return result.ui;

	if ( r_fullbright->integer )
	{
		result.b[0] = 255;
		result.b[1] = 255;
		result.b[2] = 255;
		return result.ui;
	}
	// an optimization could be added here to compute the style[0] (which is always the world normal light)
	r = g = b = 0;
	for( k=0; k<MAXLIGHTMAPS; k++ )
	{
		if ( tess.shader->styles[k] < LS_UNUSED )
		{
			byte *styleColor = styleColors[tess.shader->styles[k]];

			r += (uint32_t)(*colors++) * (uint32_t)(*styleColor++);
			g += (uint32_t)(*colors++) * (uint32_t)(*styleColor++);
			b += (uint32_t)(*colors++) * (uint32_t)(*styleColor);
			colors++;
		}
		else
			break;
	}
	// products are 8.8 fixed point, hence the >> 8 before clamping to a byte
	result.b[0] = Com_Clamp( 0, 255, r >> 8 );
	result.b[1] = Com_Clamp( 0, 255, g >> 8 );
	result.b[2] = Com_Clamp( 0, 255, b >> 8 );

	return result.ui;
}

/*
=============
RB_SurfaceTriangles
=============
*/
void RB_SurfaceTriangles( srfTriangles_t *srf ) {
	int			i, k;
	drawVert_t	*dv;
	float		*xyz, *normal, *texCoords;
	byte		*color;
	int			dlightBits;

	dlightBits = srf->dlightBits;
	tess.dlightBits |= dlightBits;

	RB_CHECKOVERFLOW( srf->numVerts, srf->numIndexes );

	// remap the surface's indexes to the tess vertex base
	for ( i = 0 ; i < srf->numIndexes ; i += 3 ) {
		tess.indexes[ tess.numIndexes + i + 0 ] = tess.numVertexes + srf->indexes[ i + 0 ];
		tess.indexes[ tess.numIndexes + i + 1 ] = tess.numVertexes + srf->indexes[ i + 1 ];
		tess.indexes[ tess.numIndexes + i + 2 ] = tess.numVertexes + srf->indexes[ i + 2 ];
	}
	tess.numIndexes += srf->numIndexes;

	dv = srf->verts;
	xyz = tess.xyz[ tess.numVertexes ];
	normal = tess.normal[ tess.numVertexes ];
	texCoords = tess.texCoords[ tess.numVertexes ][0];
	color = tess.vertexColors[ tess.numVertexes ];

	for ( i = 0 ; i < srf->numVerts ; i++, dv++) {
		xyz[0] = dv->xyz[0];
		xyz[1] = dv->xyz[1];
		xyz[2] = dv->xyz[2];
		xyz += 4;

		//if ( needsNormal )
		{
normal[0] = dv->normal[0];
			normal[1] = dv->normal[1];
			normal[2] = dv->normal[2];
		}
		normal += 4;

		texCoords[0] = dv->st[0];
		texCoords[1] = dv->st[1];

		for(k=0;k<MAXLIGHTMAPS;k++)
		{
			if (tess.shader->lightmapIndex[k] >= 0)
			{
				texCoords[2+(k*2)] = dv->lightmap[k][0];
				texCoords[2+(k*2)+1] = dv->lightmap[k][1];
			}
			else
			{	// can't have an empty slot in the middle, so we are done
				break;
			}
		}
		texCoords += NUM_TEX_COORDS*2;

		*(unsigned *)color = ComputeFinalVertexColor((byte *)dv->color);
		color += 4;
	}

	// every vertex of this surface shares the same dlight mask
	for ( i = 0 ; i < srf->numVerts ; i++ ) {
		tess.vertexDlightBits[ tess.numVertexes + i] = dlightBits;
	}

	tess.numVertexes += srf->numVerts;
}

/*
==============
RB_SurfaceBeam

Draws a colored tube between e->origin and e->oldorigin using
immediate-mode GL (bypasses the tess buffers entirely).
==============
*/
static void RB_SurfaceBeam( void )
{
#define NUM_BEAM_SEGS 6
	refEntity_t *e;
	int	i;
	vec3_t perpvec;
	vec3_t direction, normalized_direction;
	vec3_t	start_points[NUM_BEAM_SEGS], end_points[NUM_BEAM_SEGS];
	vec3_t oldorigin, origin;

	e = &backEnd.currentEntity->e;

	oldorigin[0] = e->oldorigin[0];
	oldorigin[1] = e->oldorigin[1];
	oldorigin[2] = e->oldorigin[2];

	origin[0] = e->origin[0];
	origin[1] = e->origin[1];
	origin[2] = e->origin[2];

	normalized_direction[0] = direction[0] = oldorigin[0] - origin[0];
	normalized_direction[1] = direction[1] = oldorigin[1] - origin[1];
	normalized_direction[2] = direction[2] = oldorigin[2] - origin[2];

	// degenerate beam (zero length) — nothing to draw
	if ( VectorNormalize( normalized_direction ) == 0 )
		return;

	PerpendicularVector( perpvec, normalized_direction );

	VectorScale( perpvec, 4, perpvec );

	for ( i = 0; i < NUM_BEAM_SEGS ; i++ )
	{
		RotatePointAroundVector( start_points[i], normalized_direction, perpvec, (360.0/NUM_BEAM_SEGS)*i );
//		VectorAdd( start_points[i], origin, start_points[i] );
		VectorAdd( start_points[i], direction, end_points[i] );
	}

	GL_Bind( tr.whiteImage );

	GL_State( GLS_SRCBLEND_ONE | GLS_DSTBLEND_ONE );

	switch(e->skinNum)
	{
	case 1://Green
		qglColor3f( 0, 1, 0 );
		break;
	case 2://Blue
		qglColor3f( 0.5, 0.5, 1 );
		break;
	case 0://red
	default:
		qglColor3f( 1, 0, 0 );
		break;
	}

	qglBegin( GL_TRIANGLE_STRIP );
	for ( i = 0; i <= NUM_BEAM_SEGS; i++ ) {
		// i % NUM_BEAM_SEGS wraps the final pair back to the first, closing the tube
		qglVertex3fv( start_points[ i % NUM_BEAM_SEGS] );
		qglVertex3fv( end_points[ i % NUM_BEAM_SEGS] );
	}
	qglEnd();
}

//------------------
// DoSprite
//
// Emit a view-facing quad of the given radius, rotated `rotation` degrees
// in the view plane, via RB_AddQuadStamp.
//------------------
static void DoSprite( vec3_t origin, float radius, float rotation )
{
	float	s, c;
	float	ang;
	vec3_t	left, up;

	ang = M_PI * rotation / 180.0f;
	s = sin( ang );
	c = cos( ang );

	VectorScale( backEnd.viewParms.ori.axis[1], c * radius, left );
	VectorMA( left, -s * radius, backEnd.viewParms.ori.axis[2], left );

	VectorScale( backEnd.viewParms.ori.axis[2], c * radius, up );
	VectorMA( up, s * radius, backEnd.viewParms.ori.axis[1], up );

	if ( backEnd.viewParms.isMirror )
	{
		// flip winding in mirrored views
		VectorSubtract( vec3_origin, left, left );
	}

	RB_AddQuadStamp( origin, left, up, backEnd.currentEntity->e.shaderRGBA );
}

//------------------
// RB_SurfaceSaber
//
// Saber glow: a row of sprites marching down the blade axis, growing
// slightly towards the hilt, plus one big pulsing hilt sprite.
//------------------
static void RB_SurfaceSaberGlow()
{
	vec3_t		end;
	refEntity_t *e;

	e = &backEnd.currentEntity->e;

	// Render the glow part of the blade
	// NOTE(review): e->radius is mutated as we march down the blade — confirm it is reset by the caller each frame
	for ( float i = e->saberLength; i > 0; i -= e->radius * 0.65f )
	{
		VectorMA( e->origin, i, e->axis[0], end );

		DoSprite( end, e->radius, 0.0f );//Q_flrand(0.0f, 1.0f) * 360.0f );
		e->radius += 0.017f;
	}

	// Big hilt sprite
	// Please don't kill me Pat...I liked the hilt glow blob, but wanted a subtle pulse.:)  Feel free to ditch it if you don't like it.  --Jeff
	// Please don't kill me Jeff...  The pulse is good, but now I want the halo bigger if the saber is shorter...
oldXyz[1] * oldXyzScale + newXyz[1] * newXyzScale;
			outXyz[2] = oldXyz[2] * oldXyzScale + newXyz[2] * newXyzScale;

			// FIXME: interpolate lat/long instead?
			lat = ( newNormals[0] >> 8 ) & 0xff;
			lng = ( newNormals[0] & 0xff );
			lat *= 4;
			lng *= 4;

			uncompressedNewNormal[0] = tr.sinTable[(lat+(FUNCTABLE_SIZE/4))&FUNCTABLE_MASK] * tr.sinTable[lng];
			uncompressedNewNormal[1] = tr.sinTable[lat] * tr.sinTable[lng];
			uncompressedNewNormal[2] = tr.sinTable[(lng+(FUNCTABLE_SIZE/4))&FUNCTABLE_MASK];

			lat = ( oldNormals[0] >> 8 ) & 0xff;
			lng = ( oldNormals[0] & 0xff );
			lat *= 4;
			lng *= 4;

			uncompressedOldNormal[0] = tr.sinTable[(lat+(FUNCTABLE_SIZE/4))&FUNCTABLE_MASK] * tr.sinTable[lng];
			uncompressedOldNormal[1] = tr.sinTable[lat] * tr.sinTable[lng];
			uncompressedOldNormal[2] = tr.sinTable[(lng+(FUNCTABLE_SIZE/4))&FUNCTABLE_MASK];

			// blend the two decoded normals and renormalize
			outNormal[0] = uncompressedOldNormal[0] * oldNormalScale + uncompressedNewNormal[0] * newNormalScale;
			outNormal[1] = uncompressedOldNormal[1] * oldNormalScale + uncompressedNewNormal[1] * newNormalScale;
			outNormal[2] = uncompressedOldNormal[2] * oldNormalScale + uncompressedNewNormal[2] * newNormalScale;

			VectorNormalize (outNormal);
		}
	}
}

/*
=============
RB_SurfaceMesh

Tessellate an MD3 mesh surface: lerp the vertexes for the current frame
pair, then append indexes and texcoords to the tess buffers.
=============
*/
void RB_SurfaceMesh(md3Surface_t *surface) {
	int			j;
	float		backlerp;
	int			*triangles;
	float		*texCoords;
	int			indexes;
	int			Bob, Doug;	// Bob = tess index base, Doug = tess vertex base
	int			numVerts;

	if (  backEnd.currentEntity->e.oldframe == backEnd.currentEntity->e.frame ) {
		backlerp = 0;
	} else  {
		backlerp = backEnd.currentEntity->e.backlerp;
	}

	RB_CHECKOVERFLOW( surface->numVerts, surface->numTriangles*3 );

	LerpMeshVertexes (surface, backlerp);

	triangles = (int *) ((byte *)surface + surface->ofsTriangles);
	indexes = surface->numTriangles * 3;
	Bob = tess.numIndexes;
	Doug = tess.numVertexes;
	for (j = 0 ; j < indexes ; j++) {
		tess.indexes[Bob + j] = Doug + triangles[j];
	}
	tess.numIndexes += indexes;

	texCoords = (float *) ((byte *)surface + surface->ofsSt);

	numVerts = surface->numVerts;
	for ( j = 0; j < numVerts; j++ ) {
		tess.texCoords[Doug + j][0][0] = texCoords[j*2+0];
		tess.texCoords[Doug + j][0][1] = texCoords[j*2+1];
		// FIXME: fill in lightmapST for completeness?
	}

	tess.numVertexes += surface->numVerts;
}

/*
==============
RB_SurfaceFace
==============
*/
void RB_SurfaceFace( srfSurfaceFace_t *surf ) {
	int			i, j, k;
	unsigned int *indices;
	glIndex_t	*tessIndexes;
	float		*v;
	float		*normal;
	int			ndx;
	int			Bob;
	int			numPoints;
	int			dlightBits;
	byteAlias_t	ba;

	RB_CHECKOVERFLOW( surf->numPoints, surf->numIndices );

	dlightBits = surf->dlightBits;
	tess.dlightBits |= dlightBits;

	indices = ( unsigned * ) ( ( ( char  * ) surf ) + surf->ofsIndices );

	Bob = tess.numVertexes;
	tessIndexes = tess.indexes + tess.numIndexes;
	for ( i = surf->numIndices-1 ; i >= 0  ; i-- ) {
		tessIndexes[i] = indices[i] + Bob;
	}

	tess.numIndexes += surf->numIndices;

	v = surf->points[0];

	ndx = tess.numVertexes;

	numPoints = surf->numPoints;

	//if ( tess.shader->needsNormal )
	{
		// planar face: every vertex shares the plane normal
		normal = surf->plane.normal;
		for ( i = 0, ndx = tess.numVertexes; i < numPoints; i++, ndx++ ) {
			VectorCopy( normal, tess.normal[ndx] );
		}
	}

	for ( i = 0, v = surf->points[0], ndx = tess.numVertexes; i < numPoints; i++, v += VERTEXSIZE, ndx++ ) {
		VectorCopy( v, tess.xyz[ndx]);
		tess.texCoords[ndx][0][0] = v[3];
		tess.texCoords[ndx][0][1] = v[4];
		for(k=0;k<MAXLIGHTMAPS;k++)
		{
			if (tess.shader->lightmapIndex[k] >= 0)
			{
				tess.texCoords[ndx][k+1][0] = v[VERTEX_LM+(k*2)];
				tess.texCoords[ndx][k+1][1] = v[VERTEX_LM+(k*2)+1];
			}
			else
			{
				break;
			}
		}
		ba.ui = ComputeFinalVertexColor( (byte *)&v[VERTEX_COLOR] );
		for ( j=0; j<4; j++ )
			tess.vertexColors[ndx][j] = ba.b[j];
		tess.vertexDlightBits[ndx] = dlightBits;
	}

	tess.numVertexes += surf->numPoints;
}

// Scale the r_lodCurveError cvar by the view distance to the given local-space
// bounding volume, producing the allowable subdivision error for grid LOD.
static float LodErrorForVolume( vec3_t local, float radius ) {
	vec3_t		world;
	float		d;

	// never let it go negative
	if ( r_lodCurveError->value < 0 ) {
		return 0;
	}

	// transform local into world space via the backend orientation
	world[0] = local[0] * backEnd.ori.axis[0][0] + local[1] * backEnd.ori.axis[1][0] + local[2] * backEnd.ori.axis[2][0] + backEnd.ori.origin[0];
	world[1] = local[0] *
backEnd.ori.axis[0][1] + local[1] * backEnd.ori.axis[1][1] + local[2] * backEnd.ori.axis[2][1] + backEnd.ori.origin[1];
	world[2] = local[0] * backEnd.ori.axis[0][2] + local[1] * backEnd.ori.axis[1][2] + local[2] * backEnd.ori.axis[2][2] + backEnd.ori.origin[2];

	VectorSubtract( world, backEnd.viewParms.ori.origin, world );
	d = DotProduct( world, backEnd.viewParms.ori.axis[0] );

	if ( d < 0 ) {
		d = -d;
	}
	d -= radius;
	if ( d < 1 ) {
		d = 1;
	}

	return r_lodCurveError->value / d;
}

/*
=============
RB_SurfaceGrid

Just copy the grid of points and triangulate
=============
*/
void RB_SurfaceGrid( srfGridMesh_t *cv ) {
	int		i, j, k;
	float	*xyz;
	float	*texCoords;
	float	*normal;
	unsigned char *color;
	drawVert_t	*dv;
	int		rows, irows, vrows;
	int		used;
	int		widthTable[MAX_GRID_SIZE];
	int		heightTable[MAX_GRID_SIZE];
	float	lodError;
	int		lodWidth, lodHeight;
	int		numVertexes;
	int		dlightBits;
	int		*vDlightBits;

	dlightBits = cv->dlightBits;
	tess.dlightBits |= dlightBits;

	// determine the allowable discrepance
	lodError = LodErrorForVolume( cv->lodOrigin, cv->lodRadius );

	// determine which rows and columns of the subdivision
	// we are actually going to use
	widthTable[0] = 0;
	lodWidth = 1;
	for ( i = 1 ; i < cv->width-1 ; i++ ) {
		if ( cv->widthLodError[i] <= lodError ) {
			widthTable[lodWidth] = i;
			lodWidth++;
		}
	}
	widthTable[lodWidth] = cv->width-1;
	lodWidth++;

	heightTable[0] = 0;
	lodHeight = 1;
	for ( i = 1 ; i < cv->height-1 ; i++ ) {
		if ( cv->heightLodError[i] <= lodError ) {
			heightTable[lodHeight] = i;
			lodHeight++;
		}
	}
	heightTable[lodHeight] = cv->height-1;
	lodHeight++;

	// very large grids may have more points or indexes than can be fit
	// in the tess structure, so we may have to issue it in multiple passes

	used = 0;
	rows = 0;
	while ( used < lodHeight - 1 ) {
		// see how many rows of both verts and indexes we can add without overflowing
		do {
			vrows = ( SHADER_MAX_VERTEXES - tess.numVertexes ) / lodWidth;
			irows = ( SHADER_MAX_INDEXES - tess.numIndexes ) / ( lodWidth * 6 );

			// if we don't have enough space for at least one strip, flush the buffer
			if ( vrows < 2 || irows < 1 ) {
				RB_EndSurface();
				RB_BeginSurface(tess.shader, tess.fogNum );
			} else {
				break;
			}
		} while ( 1 );

		rows = irows;
		if ( vrows < irows + 1 ) {
			rows = vrows - 1;
		}
		if ( used + rows > lodHeight ) {
			rows = lodHeight - used;
		}

		numVertexes = tess.numVertexes;

		xyz = tess.xyz[numVertexes];
		normal = tess.normal[numVertexes];
		texCoords = tess.texCoords[numVertexes][0];
		color = ( unsigned char * ) &tess.vertexColors[numVertexes];
		vDlightBits = &tess.vertexDlightBits[numVertexes];

		for ( i = 0 ; i < rows ; i++ ) {
			for ( j = 0 ; j < lodWidth ; j++ ) {
				dv = cv->verts + heightTable[ used + i ] * cv->width + widthTable[ j ];

				xyz[0] = dv->xyz[0];
				xyz[1] = dv->xyz[1];
				xyz[2] = dv->xyz[2];
				xyz += 4;

				texCoords[0] = dv->st[0];
				texCoords[1] = dv->st[1];
				for(k=0;k<MAXLIGHTMAPS;k++)
				{
					texCoords[2+(k*2)]= dv->lightmap[k][0];
					texCoords[2+(k*2)+1]= dv->lightmap[k][1];
				}
				texCoords += NUM_TEX_COORDS*2;

//				if ( needsNormal )
				{
					normal[0] = dv->normal[0];
					normal[1] = dv->normal[1];
					normal[2] = dv->normal[2];
				}
				normal += 4;

				*(unsigned *)color = ComputeFinalVertexColor((byte *)dv->color);
				color += 4;

				*vDlightBits++ = dlightBits;
			}
		}

		// add the indexes
		{
			int		numIndexes;
			int		w, h;

			h = rows - 1;
			w = lodWidth - 1;
			numIndexes = tess.numIndexes;
			for (i = 0 ; i < h ; i++) {
				for (j = 0 ; j < w ; j++) {
					int		v1, v2, v3, v4;

					// vertex order to be reckognized as tristrips
					v1 = numVertexes + i*lodWidth + j + 1;
					v2 = v1 - 1;
					v3 = v2 + lodWidth;
					v4 = v3 + 1;

					tess.indexes[numIndexes] = v2;
					tess.indexes[numIndexes+1] = v3;
					tess.indexes[numIndexes+2] = v1;

					tess.indexes[numIndexes+3] = v1;
					tess.indexes[numIndexes+4] = v3;
					tess.indexes[numIndexes+5] = v4;
					numIndexes += 6;
				}
			}
			tess.numIndexes = numIndexes;
		}

		tess.numVertexes += rows * lodWidth;

		// the last row of this pass is re-emitted as the first row of the next
		used += rows - 1;
	}
}

#define LATHE_SEG_STEP	10
#define BEZIER_STEP		0.05f	// must be in the range of 0 to 1

// FIXME: This function is horribly expensive
static void RB_SurfaceLathe()
{
	refEntity_t *e;
	vec2_t		pt, oldpt, l_oldpt;
	vec2_t		pt2, oldpt2, l_oldpt2;
	float		bezierStep, latheStep;
	float		temp, mu, mum1;
	float		mum13, mu3, group1, group2;
	float		s, c, d = 1.0f, pain = 0.0f;
	int			i, t, vbase;

	e = &backEnd.currentEntity->e;

	// d scales how much of the bezier profile is drawn (shrinks as endTime approaches)
	if ( e->endTime && e->endTime > backEnd.refdef.time )
	{
		d = 1.0f - ( e->endTime - backEnd.refdef.time ) / 1000.0f;
	}

	// pain wobbles the surface for a second after e->frame
	if ( e->frame && e->frame + 1000 > backEnd.refdef.time )
	{
		pain = ( backEnd.refdef.time - e->frame ) / 1000.0f;
//		pain *= pain;
		pain = ( 1.0f - pain ) * 0.08f;
	}

	VectorSet2( l_oldpt, e->axis[0][0], e->axis[0][1] );

	// do scalability stuff...r_lodbias 0-3
	int lod = r_lodbias->integer + 1;
	if ( lod > 4 )
	{
		lod = 4;
	}
	bezierStep = BEZIER_STEP * lod;
	latheStep = LATHE_SEG_STEP * lod;

	// Do bezier profile strip, then lathe this around to make a 3d model
	for ( mu = 0.0f; mu <= 1.01f * d; mu += bezierStep )
	{
		// Four point curve
		mum1	= 1 - mu;
		mum13	= mum1 * mum1 * mum1;
		mu3		= mu * mu * mu;
		group1	= 3 * mu * mum1 * mum1;
		group2	= 3 * mu * mu *mum1;

		// Calc the current point on the curve
		for ( i = 0; i < 2; i++ )
		{
			l_oldpt2[i] = mum13 * e->axis[0][i] + group1 * e->axis[1][i] + group2 * e->axis[2][i] + mu3 * e->oldorigin[i];
		}

		VectorSet2( oldpt, l_oldpt[0], 0 );
		VectorSet2( oldpt2, l_oldpt2[0], 0 );

		// lathe patch section around in a complete circle
		for ( t = latheStep; t <= 360; t += latheStep )
		{
			VectorSet2( pt, l_oldpt[0], 0 );
			VectorSet2( pt2, l_oldpt2[0], 0 );

			s = sin( DEG2RAD( t ));
			c = cos( DEG2RAD( t ));

			// rotate lathe points
//c -s 0
//s c 0
//0 0 1
			temp = c * pt[0] - s * pt[1];
			pt[1] = s * pt[0] + c * pt[1];
			pt[0] = temp;
			temp = c * pt2[0] - s * pt2[1];
			pt2[1] = s * pt2[0] + c * pt2[1];
			pt2[0] = temp;

			RB_CHECKOVERFLOW( 4, 6 );

			vbase = tess.numVertexes;

			// Actually generate the necessary verts
			VectorSet( tess.normal[tess.numVertexes], oldpt[0], oldpt[1], l_oldpt[1] );
			VectorAdd( e->origin, tess.normal[tess.numVertexes], tess.xyz[tess.numVertexes] );
			VectorNormalize( tess.normal[tess.numVertexes] );
			i = oldpt[0] * 0.1f + oldpt[1] * 0.1f;
			tess.texCoords[tess.numVertexes][0][0] = (t-latheStep)/360.0f;
			tess.texCoords[tess.numVertexes][0][1] = mu-bezierStep + cos( i + backEnd.refdef.floatTime ) * pain;
			tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0];
			tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1];
			tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2];
			tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3];
			tess.numVertexes++;

			VectorSet( tess.normal[tess.numVertexes], oldpt2[0], oldpt2[1], l_oldpt2[1] );
			VectorAdd( e->origin, tess.normal[tess.numVertexes], tess.xyz[tess.numVertexes] );
			VectorNormalize( tess.normal[tess.numVertexes] );
			i = oldpt2[0] * 0.1f + oldpt2[1] * 0.1f;
			tess.texCoords[tess.numVertexes][0][0] = (t-latheStep) / 360.0f;
			tess.texCoords[tess.numVertexes][0][1] = mu + cos( i + backEnd.refdef.floatTime ) * pain;
			tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0];
			tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1];
			tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2];
			tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3];
			tess.numVertexes++;

			VectorSet( tess.normal[tess.numVertexes], pt[0], pt[1], l_oldpt[1] );
			VectorAdd( e->origin, tess.normal[tess.numVertexes], tess.xyz[tess.numVertexes] );
			VectorNormalize( tess.normal[tess.numVertexes] );
			i = pt[0] * 0.1f + pt[1] * 0.1f;
			tess.texCoords[tess.numVertexes][0][0] = t/360.0f;
			tess.texCoords[tess.numVertexes][0][1] = mu-bezierStep + cos( i + backEnd.refdef.floatTime ) * pain;
			tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0];
			tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1];
			tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2];
			tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3];
			tess.numVertexes++;

			VectorSet( tess.normal[tess.numVertexes], pt2[0], pt2[1], l_oldpt2[1] );
			VectorAdd( e->origin, tess.normal[tess.numVertexes], tess.xyz[tess.numVertexes] );
			VectorNormalize( tess.normal[tess.numVertexes] );
			i = pt2[0] * 0.1f + pt2[1] * 0.1f;
			tess.texCoords[tess.numVertexes][0][0] = t/360.0f;
			tess.texCoords[tess.numVertexes][0][1] = mu + cos( i + backEnd.refdef.floatTime ) * pain;
			tess.vertexColors[tess.numVertexes][0] = e->shaderRGBA[0];
			tess.vertexColors[tess.numVertexes][1] = e->shaderRGBA[1];
			tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[2];
			tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3];
			tess.numVertexes++;

			tess.indexes[tess.numIndexes++] = vbase;
			tess.indexes[tess.numIndexes++] = vbase + 1;
			tess.indexes[tess.numIndexes++] = vbase + 3;

			tess.indexes[tess.numIndexes++] = vbase + 3;
			tess.indexes[tess.numIndexes++] = vbase + 2;
			tess.indexes[tess.numIndexes++] = vbase;

			// Shuffle new points to old
			VectorCopy2( pt, oldpt );
			VectorCopy2( pt2, oldpt2 );
		}

		// shuffle lathe points
		VectorCopy2( l_oldpt2, l_oldpt );
	}
}

#define DISK_DEF 4
#define TUBE_DEF 6

// Render a disk or tube (RF_GROW) cloud surface by lathing a small profile
// strip (stripDef/alphaDef/curveDef tables) around the entity origin.
static void RB_SurfaceClouds()
{
	// Disk definition
	float diskStripDef[DISK_DEF] = {
				0.0f,
				0.4f,
				0.7f,
				1.0f };

	float diskAlphaDef[DISK_DEF] = {
				1.0f,
				1.0f,
				0.4f,
				0.0f };

	float diskCurveDef[DISK_DEF] = {
				0.0f,
				0.0f,
				0.008f,
				0.02f };

	// tube definition
	float tubeStripDef[TUBE_DEF] = {
				0.0f,
				0.05f,
				0.1f,
				0.5f,
				0.7f,
				1.0f };

	float tubeAlphaDef[TUBE_DEF] = {
				0.0f,
				0.45f,
				1.0f,
				1.0f,
				0.45f,
				0.0f };

	float tubeCurveDef[TUBE_DEF] = {
				0.0f,
				0.004f,
				0.006f,
				0.01f,
				0.006f,
				0.0f };

	refEntity_t *e;
	vec3_t		pt, oldpt;
	vec3_t		pt2, oldpt2;
	float		latheStep = 30.0f;
	float		s, c, temp;
	float		*stripDef, *alphaDef, *curveDef, ct;
	int			i, t, vbase;

	e = &backEnd.currentEntity->e;

	// select which type we shall be doing
	if ( e->renderfx & RF_GROW ) // doing tube type
	{
		ct = TUBE_DEF;
		stripDef = tubeStripDef;
		alphaDef = tubeAlphaDef;
		curveDef = tubeCurveDef;
		e->backlerp *= -1; // needs to be reversed
	}
	else
	{
		ct = DISK_DEF;
		stripDef = diskStripDef;
		alphaDef = diskAlphaDef;
		curveDef = diskCurveDef;
	}

	// do the strip def, then lathe this around to make a 3d model
	for ( i = 0; i < ct - 1; i++ )
	{
		VectorSet( oldpt, (stripDef[i] * (e->radius - e->rotation)) + e->rotation,
0, curveDef[i] * e->radius * e->backlerp );
		VectorSet( oldpt2, (stripDef[i+1] * (e->radius - e->rotation)) + e->rotation,
				0, curveDef[i+1] * e->radius * e->backlerp );

		// lathe section around in a complete circle
		for ( t = latheStep; t <= 360; t += latheStep )
		{
			// rotate every time except last seg
			if ( t < 360.0f )
			{
				VectorCopy( oldpt, pt );
				VectorCopy( oldpt2, pt2 );

				s = sin( DEG2RAD( latheStep ));
				c = cos( DEG2RAD( latheStep ));

				// rotate lathe points
				temp = c * pt[0] - s * pt[1];	// c -s 0
				pt[1] = s * pt[0] + c * pt[1];	// s c 0
				pt[0] = temp;					// 0 0 1

				temp = c * pt2[0] - s * pt2[1];		// c -s 0
				pt2[1] = s * pt2[0] + c * pt2[1];// s c 0
				pt2[0] = temp;					// 0 0 1
			}
			else
			{
				// just glue directly to the def points.
				VectorSet( pt, (stripDef[i] * (e->radius - e->rotation)) + e->rotation,
						0, curveDef[i] * e->radius * e->backlerp );
				VectorSet( pt2, (stripDef[i+1] * (e->radius - e->rotation)) + e->rotation,
						0, curveDef[i+1] * e->radius * e->backlerp );
			}

			RB_CHECKOVERFLOW( 4, 6 );

			vbase = tess.numVertexes;

			// Actually generate the necessary verts
			// texcoords come from world xy; RGB is modulated by the strip's alpha ramp
			VectorAdd( e->origin, oldpt, tess.xyz[tess.numVertexes] );
			tess.texCoords[tess.numVertexes][0][0] = tess.xyz[tess.numVertexes][0] * 0.1f;
			tess.texCoords[tess.numVertexes][0][1] = tess.xyz[tess.numVertexes][1] * 0.1f;
			tess.vertexColors[tess.numVertexes][0] =
			tess.vertexColors[tess.numVertexes][1] =
			tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[0] * alphaDef[i];
			tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3];
			tess.numVertexes++;

			VectorAdd( e->origin, oldpt2, tess.xyz[tess.numVertexes] );
			tess.texCoords[tess.numVertexes][0][0] = tess.xyz[tess.numVertexes][0] * 0.1f;
			tess.texCoords[tess.numVertexes][0][1] = tess.xyz[tess.numVertexes][1] * 0.1f;
			tess.vertexColors[tess.numVertexes][0] =
			tess.vertexColors[tess.numVertexes][1] =
			tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[0] * alphaDef[i+1];
			tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3];
			tess.numVertexes++;

			VectorAdd( e->origin, pt, tess.xyz[tess.numVertexes] );
			tess.texCoords[tess.numVertexes][0][0] = tess.xyz[tess.numVertexes][0] * 0.1f;
			tess.texCoords[tess.numVertexes][0][1] = tess.xyz[tess.numVertexes][1] * 0.1f;
			tess.vertexColors[tess.numVertexes][0] =
			tess.vertexColors[tess.numVertexes][1] =
			tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[0] * alphaDef[i];
			tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3];
			tess.numVertexes++;

			VectorAdd( e->origin, pt2, tess.xyz[tess.numVertexes] );
			tess.texCoords[tess.numVertexes][0][0] = tess.xyz[tess.numVertexes][0] * 0.1f;
			tess.texCoords[tess.numVertexes][0][1] = tess.xyz[tess.numVertexes][1] * 0.1f;
			tess.vertexColors[tess.numVertexes][0] =
			tess.vertexColors[tess.numVertexes][1] =
			tess.vertexColors[tess.numVertexes][2] = e->shaderRGBA[0] * alphaDef[i+1];
			tess.vertexColors[tess.numVertexes][3] = e->shaderRGBA[3];
			tess.numVertexes++;

			tess.indexes[tess.numIndexes++] = vbase;
			tess.indexes[tess.numIndexes++] = vbase + 1;
			tess.indexes[tess.numIndexes++] = vbase + 3;

			tess.indexes[tess.numIndexes++] = vbase + 3;
			tess.indexes[tess.numIndexes++] = vbase + 2;
			tess.indexes[tess.numIndexes++] = vbase;

			// Shuffle new points to old
			VectorCopy2( pt, oldpt );
			VectorCopy2( pt2, oldpt2 );
		}
	}
}

/*
===========================================================================

NULL MODEL

===========================================================================
*/

/*
===================
RB_SurfaceAxis

Draws x/y/z lines from the origin for orientation debugging
===================
*/
static void RB_SurfaceAxis( void ) {
	GL_Bind( tr.whiteImage );
	GL_State( GLS_DEFAULT );
	qglLineWidth( 3 );
	qglBegin( GL_LINES );
	qglColor3f( 1,0,0 );
	qglVertex3f( 0,0,0 );
	qglVertex3f( 16,0,0 );
	qglColor3f( 0,1,0 );
	qglVertex3f( 0,0,0 );
	qglVertex3f( 0,16,0 );
	qglColor3f( 0,0,1 );
	qglVertex3f( 0,0,0 );
	qglVertex3f( 0,0,16 );
	qglEnd();
	qglLineWidth( 1 );
}

//===========================================================================

/*
====================
RB_SurfaceEntity

Entities
that have a single procedurally generated surface
====================
*/
// Dispatches a refEntity_t to the proper procedural-surface tesselator
// based on its reType; unknown types fall back to the debug axis drawer.
void RB_SurfaceEntity( surfaceType_t *surfType ) {
	switch( backEnd.currentEntity->e.reType ) {
	case RT_SPRITE:
		RB_SurfaceSprite();
		break;
	case RT_ORIENTED_QUAD:
		RB_SurfaceOrientedQuad();
		break;
	case RT_LINE:
		RB_SurfaceLine();
		break;
	case RT_ELECTRICITY:
		RB_SurfaceElectricity();
		break;
	case RT_BEAM:
		RB_SurfaceBeam();
		break;
	case RT_SABER_GLOW:
		RB_SurfaceSaberGlow();
		break;
	case RT_CYLINDER:
		RB_SurfaceCylinder();
		break;
	case RT_LATHE:
		RB_SurfaceLathe();
		break;
	case RT_CLOUDS:
		RB_SurfaceClouds();
		break;
	default:
		// Unrecognized reType: draw RGB axis lines so the entity is visible
		// for debugging instead of silently disappearing.
		RB_SurfaceAxis();
		break;
	}
	return;
}

// Error handler for the SF_BAD slot of rb_surfaceTable: only logs, never draws.
void RB_SurfaceBad( surfaceType_t *surfType ) {
	ri.Printf( PRINT_ALL, "Bad surface tesselated.\n" );
}

/*
==================
RB_TestZFlare

This is called at surface tesselation time.
Returns true when the flare origin point is on screen and (unless the
z-test is skipped via r_flares != 1) not occluded by closer geometry,
judged by reading back one depth-buffer pixel at the point's window
coordinates.
==================
*/
static bool RB_TestZFlare( vec3_t point) {
	int i;
	vec4_t eye, clip, normalized, window;

	// if the point is off the screen, don't bother adding it
	// calculate screen coordinates and depth
	R_TransformModelToClip( point, backEnd.ori.modelMatrix,
		backEnd.viewParms.projectionMatrix, eye, clip );

	// check to see if the point is completely off screen
	// (outside the clip volume on x, y, or z)
	for ( i = 0 ; i < 3 ; i++ ) {
		if ( clip[i] >= clip[3] || clip[i] <= -clip[3] ) {
			return qfalse;
		}
	}

	R_TransformClipToWindow( clip, &backEnd.viewParms, normalized, window );

	if ( window[0] < 0 || window[0] >= backEnd.viewParms.viewportWidth
		|| window[1] < 0 || window[1] >= backEnd.viewParms.viewportHeight ) {
		return qfalse;	// shouldn't happen, since we check the clip[] above, except for FP rounding
	}

	// do test
	float	depth = 0.0f;
	bool	visible;
	float	screenZ;

	// read back the z buffer contents
	if ( r_flares->integer !=1 ) { // skipping the z-test entirely: always visible
		return true;
	}
	// doing a readpixels is as good as doing a glFinish(), so
	// don't bother with another sync
	glState.finishCalled = qfalse;
	// Sample one depth pixel at the flare's window position.
	qglReadPixels( backEnd.viewParms.viewportX + window[0],
		backEnd.viewParms.viewportY + window[1], 1, 1,
		GL_DEPTH_COMPONENT, GL_FLOAT, &depth );

	// Invert the projection to recover eye-space Z from the [0,1] depth value.
	screenZ = backEnd.viewParms.projectionMatrix[14] /
		( ( 2*depth - 1 ) * backEnd.viewParms.projectionMatrix[11] - backEnd.viewParms.projectionMatrix[10] );

	// Visible when the flare point is within 24 units in front of the
	// geometry already in the depth buffer (tolerance for coplanar surfaces).
	visible = ( -eye[2] - -screenZ ) < 24;
	return visible;
}

// Tesselates an SF_FLARE surface: a camera-facing quad whose brightness
// fades with the angle between the flare normal and the view direction,
// and whose radius shrinks with distance (clamped to a 5-unit minimum).
void RB_SurfaceFlare( srfFlare_t *surf ) {
	vec3_t		left, up;
	float		radius;
	byte		color[4];
	vec3_t		dir;
	vec3_t		origin;
	float		d, dist;

	if ( !r_flares->integer ) {
		return;
	}

	if (!RB_TestZFlare( surf->origin ) ) {
		return;
	}

	// calculate the xyz locations for the four corners
	// (push the quad 3 units off the surface along its normal)
	VectorMA( surf->origin, 3, surf->normal, origin );
	float* snormal = surf->normal;

	VectorSubtract( origin, backEnd.viewParms.ori.origin, dir );
	dist = VectorNormalize( dir );

	d = -DotProduct( dir, snormal );
	if ( d < 0 ) {
		d = -d;
	}

	// fade the intensity of the flare down as the
	// light surface turns away from the viewer
	color[0] = d * 255;
	color[1] = d * 255;
	color[2] = d * 255;
	color[3] = 255;	// only gets used if the shader has cgen exact_vertex!

	// portalRange doubles as an optional per-shader flare radius override.
	radius = tess.shader->portalRange ? tess.shader->portalRange: 30;
	if (dist < 512.0f)
	{
		radius = radius * dist / 512.0f;
	}
	if (radius<5.0f)
	{
		radius = 5.0f;
	}
	// Build the quad axes from the view axes so the flare always faces the camera.
	VectorScale( backEnd.viewParms.ori.axis[1], radius, left );
	VectorScale( backEnd.viewParms.ori.axis[2], radius, up );
	if ( backEnd.viewParms.isMirror ) {
		// Mirrored views flip handedness; flip 'left' to keep winding correct.
		VectorSubtract( vec3_origin, left, left );
	}

	RB_AddQuadStamp( origin, left, up, color );
}

// Replays a precompiled GL display list for an SF_DISPLAY_LIST surface.
void RB_SurfaceDisplayList( srfDisplayList_t *surf ) {
	// all appropriate state must be set in RB_BeginSurface
	// this isn't implemented yet...
	qglCallList( surf->listNum );
}

// No-op handler for the SF_SKIP slot of rb_surfaceTable.
void RB_SurfaceSkip( void *surf ) {
}

// Dispatch table indexed by surfaceType_t: maps each surface type to its
// tesselation function. Order must match the SF_* enum exactly.
void (*rb_surfaceTable[SF_NUM_SURFACE_TYPES])( void *) = {
	(void(*)(void*))RB_SurfaceBad,			// SF_BAD,
	(void(*)(void*))RB_SurfaceSkip,			// SF_SKIP,
	(void(*)(void*))RB_SurfaceFace,			// SF_FACE,
	(void(*)(void*))RB_SurfaceGrid,			// SF_GRID,
	(void(*)(void*))RB_SurfaceTriangles,	// SF_TRIANGLES,
	(void(*)(void*))RB_SurfacePolychain,	// SF_POLY,
	(void(*)(void*))RB_SurfaceMesh,			// SF_MD3,
/*
Ghoul2 Insert Start
*/
	(void(*)(void*))RB_SurfaceGhoul,		// SF_MDX,
/*
Ghoul2 Insert End
*/
	(void(*)(void*))RB_SurfaceFlare,		// SF_FLARE,
	(void(*)(void*))RB_SurfaceEntity,		// SF_ENTITY
	(void(*)(void*))RB_SurfaceDisplayList	// SF_DISPLAY_LIST
};
gpl-2.0
claudm/zentyal
extra/novnc/utils/json2graph.py
6678
#!/usr/bin/env python
'''
Use matplotlib to generate performance charts

Copyright 2011 Joel Martin
Licensed under GPL version 3 (see docs/LICENSE.GPL-3)
'''

# NOTE(review): this is a Python 2 script ('print' statements, the 'file()'
# builtin) - kept as-is; it is not runnable under Python 3 without porting.

# a bar plot with errorbars
import sys, json, pprint
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties


def usage():
    # Print CLI help describing the three-level JSON stats hierarchy, then
    # exit with status 2 (conventional "usage error" code).
    print "%s json_file level1 level2 level3 [legend_height]\n\n" % sys.argv[0]
    print "Description:\n"
    print "level1, level2, and level3 are one each of the following:\n";
    print " select=ITEM - select only ITEM at this level";
    print " bar - each item on this level becomes a graph bar";
    print " group - items on this level become groups of bars";
    print "\n";
    print "json_file is a file containing json data in the following format:\n"
    print ' {';
    print ' "conf": {';
    print ' "order_l1": [';
    print ' "level1_label1",';
    print ' "level1_label2",';
    print ' ...';
    print ' ],';
    print ' "order_l2": [';
    print ' "level2_label1",';
    print ' "level2_label2",';
    print ' ...';
    print ' ],';
    print ' "order_l3": [';
    print ' "level3_label1",';
    print ' "level3_label2",';
    print ' ...';
    print ' ]';
    print ' },';
    print ' "stats": {';
    print ' "level1_label1": {';
    print ' "level2_label1": {';
    print ' "level3_label1": [val1, val2, val3],';
    print ' "level3_label2": [val1, val2, val3],';
    print ' ...';
    print ' },';
    print ' "level2_label2": {';
    print ' ...';
    print ' },';
    print ' },';
    print ' "level1_label2": {';
    print ' ...';
    print ' },';
    print ' ...';
    print ' },';
    print ' }';
    sys.exit(2)


def error(msg):
    # Print a data-validation error and exit with status 1.
    print msg
    sys.exit(1)


#colors = ['#ff0000', '#0863e9', '#00f200', '#ffa100',
# '#800000', '#805100', '#013075', '#007900']
# Bar fill colors, one per bar label (assumes <= 12 bars).
colors = ['#ff0000', '#00ff00', '#0000ff',
          '#dddd00', '#dd00dd', '#00dddd',
          '#dd6622', '#dd2266', '#66dd22',
          '#8844dd', '#44dd88', '#4488dd']

if len(sys.argv) < 5:
    usage()

filename = sys.argv[1]
L1 = sys.argv[2]
L2 = sys.argv[3]
L3 = sys.argv[4]
if len(sys.argv) > 5:
    legendHeight = float(sys.argv[5])
else:
    legendHeight = 0.75

# Load the JSON data from the file
data = json.loads(file(filename).read())

conf = data['conf']
stats = data['stats']

# Sanity check data hierarchy: every label present in 'stats' must appear in
# the corresponding conf.order_lN list, and the counts must match exactly.
if len(conf['order_l1']) != len(stats.keys()):
    error("conf.order_l1 does not match stats level 1")
for l1 in stats.keys():
    if len(conf['order_l2']) != len(stats[l1].keys()):
        error("conf.order_l2 does not match stats level 2 for %s" % l1)
    if conf['order_l1'].count(l1) < 1:
        error("%s not found in conf.order_l1" % l1)
    for l2 in stats[l1].keys():
        if len(conf['order_l3']) != len(stats[l1][l2].keys()):
            error("conf.order_l3 does not match stats level 3")
        if conf['order_l2'].count(l2) < 1:
            error("%s not found in conf.order_l2" % l2)
        for l3 in stats[l1][l2].keys():
            if conf['order_l3'].count(l3) < 1:
                error("%s not found in conf.order_l3" % l3)

#
# Generate the data based on the level specifications.
# Exactly one of L1/L2/L3 must be "select=ITEM"; the remaining two levels
# become the bar labels and the group labels. bar_vals/bar_sdvs hold the
# mean and standard deviation of each [bar][group] sample list.
#
bar_labels = None
group_labels = None
bar_vals = []
bar_sdvs = []
if L3.startswith("select="):
    select_label = l3 = L3.split("=")[1]
    bar_labels = conf['order_l1']
    group_labels = conf['order_l2']
    bar_vals = [[0]*len(group_labels) for i in bar_labels]
    bar_sdvs = [[0]*len(group_labels) for i in bar_labels]
    for b in range(len(bar_labels)):
        l1 = bar_labels[b]
        for g in range(len(group_labels)):
            l2 = group_labels[g]
            bar_vals[b][g] = np.mean(stats[l1][l2][l3])
            bar_sdvs[b][g] = np.std(stats[l1][l2][l3])
elif L2.startswith("select="):
    select_label = l2 = L2.split("=")[1]
    bar_labels = conf['order_l1']
    group_labels = conf['order_l3']
    bar_vals = [[0]*len(group_labels) for i in bar_labels]
    bar_sdvs = [[0]*len(group_labels) for i in bar_labels]
    for b in range(len(bar_labels)):
        l1 = bar_labels[b]
        for g in range(len(group_labels)):
            l3 = group_labels[g]
            bar_vals[b][g] = np.mean(stats[l1][l2][l3])
            bar_sdvs[b][g] = np.std(stats[l1][l2][l3])
elif L1.startswith("select="):
    select_label = l1 = L1.split("=")[1]
    bar_labels = conf['order_l2']
    group_labels = conf['order_l3']
    bar_vals = [[0]*len(group_labels) for i in bar_labels]
    bar_sdvs = [[0]*len(group_labels) for i in bar_labels]
    for b in range(len(bar_labels)):
        l2 = bar_labels[b]
        for g in range(len(group_labels)):
            l3 = group_labels[g]
            bar_vals[b][g] = np.mean(stats[l1][l2][l3])
            bar_sdvs[b][g] = np.std(stats[l1][l2][l3])
else:
    usage()

# If group is before bar then flip (zip) the data
# (transpose so bars always iterate over the later-specified level).
if [L1, L2, L3].index("group") < [L1, L2, L3].index("bar"):
    bar_labels, group_labels = group_labels, bar_labels
    bar_vals = zip(*bar_vals)
    bar_sdvs = zip(*bar_sdvs)

print "bar_vals:", bar_vals

#
# Now render the bar graph
#
ind = np.arange(len(group_labels))    # the x locations for the groups
width = 0.8 * (1.0/len(bar_labels))   # the width of the bars

fig = plt.figure(figsize=(10,6), dpi=80)
plot = fig.add_subplot(1, 1, 1)

# One bar series per bar label, offset within each group; error bars show
# the standard deviation of the samples.
rects = []
for i in range(len(bar_vals)):
    rects.append(plot.bar(ind+width*i, bar_vals[i], width,
                          color=colors[i], yerr=bar_sdvs[i], align='center'))

# add some
plot.set_ylabel('Milliseconds (less is better)')
plot.set_title("Javascript array test: %s" % select_label)
plot.set_xticks(ind+width)
plot.set_xticklabels( group_labels )

fontP = FontProperties()
fontP.set_size('small')
plot.legend( [r[0] for r in rects], bar_labels, prop=fontP,
             loc = 'center right', bbox_to_anchor = (1.0, legendHeight))


def autolabel(rects):
    # attach some text labels: print each bar's integer height above it
    # (NaN heights - e.g. missing samples - are labelled as 0).
    for rect in rects:
        height = rect.get_height()
        if np.isnan(height):
            height = 0.0
        plot.text(rect.get_x()+rect.get_width()/2., height+20, '%d'%int(height),
                  ha='center', va='bottom', size='7')

for rect in rects:
    autolabel(rect)

# Adjust axis sizes
axis = list(plot.axis())
axis[0] = -width          # Make sure left side has enough for bar
#axis[1] = axis[1] * 1.20 # Add 20% to the right to make sure it fits
axis[2] = 0               # Make y-axis start at 0
axis[3] = axis[3] * 1.10  # Add 10% to the top

plot.axis(axis)

plt.show()
gpl-2.0
ForAEdesWeb/AEW25
components/com_jcomments/plugins/com_jvideo.plugin.php
970
<?php

/**
 * JComments plugin for JVideo (http://jvideo.infinovision.com/)
 *
 * @version 2.3
 * @package JComments
 * @author Sergey M. Litvinov ([email protected])
 * @copyright (C) 2006-2013 by Sergey M. Litvinov (http://www.joomlatune.ru)
 * @license GNU/GPL: http://www.gnu.org/copyleft/gpl.html
 */

defined('_JEXEC') or die;

class jc_com_jvideo extends JCommentsPlugin
{
	/**
	 * Builds the JComments object info (title, owner, link) for a JVideo item.
	 *
	 * @param int         $id       JVideo video id (row id in #__jvideos).
	 * @param string|null $language Unused here; part of the JCommentsPlugin contract.
	 *
	 * @return JCommentsObjectInfo Populated when the video exists, otherwise empty.
	 */
	function getObjectInfo($id, $language = null)
	{
		$db = JFactory::getDBO();
		// FIX: cast $id to int before interpolating it into the SQL statement.
		// $id originates from request data, so raw concatenation was a SQL
		// injection vector.
		$db->setQuery('SELECT id, video_title, user_id FROM #__jvideos WHERE id = ' . (int) $id);
		$row = $db->loadObject();

		$info = new JCommentsObjectInfo();

		if (!empty($row)) {
			// Resolve the menu Itemid for com_jvideo so the link routes correctly.
			$Itemid = self::getItemid('com_jvideo');
			$Itemid = $Itemid > 0 ? '&Itemid=' . $Itemid : '';

			$info->title = $row->video_title;
			// Access level 0:视ible to everyone per JComments conventions —
			// NOTE(review): presumably public access; confirm against JComments docs.
			$info->access = 0;
			$info->userid = $row->user_id;
			$info->link = JRoute::_('index.php?option=com_jvideo&view=watch&id=' . $id . $Itemid);
		}

		return $info;
	}
}
gpl-2.0
sungsujo/nacl-llvm-branches.llvm-gcc-trunk
libstdc++-v3/testsuite/performance/21_strings/string_copy_cons_and_dest.cc
1998
// Copyright (C) 2006 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 2, or (at your option) // any later version. // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING. If not, write to the Free // Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, // USA. // As a special exception, you may use this file as part of a free software // library without restriction. Specifically, if other files instantiate // templates or use macros or inline functions from this file, or you compile // this file and link it with other files to produce an executable, this // file does not by itself cause the resulting executable to be covered by // the GNU General Public License. This exception does not however // invalidate any other reasons why the executable file might be covered by // the GNU General Public License. #include <string> #include <testsuite_performance.h> void benchmark(long len) { using namespace std; using namespace __gnu_test; time_counter time; resource_counter resource; start_counters(time, resource); string a("1"); for (long i = 0; i < len; ++i) { string ss1(a); string ss2(ss1); string ss3(ss2); string ss4(ss3); string ss5(ss4); } stop_counters(time, resource); report_performance(__FILE__, "", time, resource); clear_counters(time, resource); } int main() { benchmark(1000000); benchmark(10000000); benchmark(100000000); return 0; }
gpl-2.0
larenz/Gokujo
plugins/editors/rokpad/ace/snippets/ruby.js
23448
/* * @author RocketTheme http://www.rockettheme.com * @copyright Copyright (C) 2007 - 2014 RocketTheme, LLC * @license http://www.gnu.org/licenses/gpl-2.0.html GNU/GPLv2 only */ ace.define('ace/snippets/ruby', ['require', 'exports', 'module' ], function(require, exports, module) { exports.snippetText = "########################################\n\ # Ruby snippets - for Rails, see below #\n\ ########################################\n\ \n\ # encoding for Ruby 1.9\n\ snippet enc\n\ # encoding: utf-8\n\ \n\ # #!/usr/bin/env ruby\n\ snippet #!\n\ #!/usr/bin/env ruby\n\ # encoding: utf-8\n\ \n\ # New Block\n\ snippet =b\n\ =begin rdoc\n\ ${1}\n\ =end\n\ snippet y\n\ :yields: ${1:arguments}\n\ snippet rb\n\ #!/usr/bin/env ruby -wKU\n\ snippet beg\n\ begin\n\ ${3}\n\ rescue ${1:Exception} => ${2:e}\n\ end\n\ \n\ snippet req require\n\ require \"${1}\"${2}\n\ snippet #\n\ # =>\n\ snippet end\n\ __END__\n\ snippet case\n\ case ${1:object}\n\ when ${2:condition}\n\ ${3}\n\ end\n\ snippet when\n\ when ${1:condition}\n\ ${2}\n\ snippet def\n\ def ${1:method_name}\n\ ${2}\n\ end\n\ snippet deft\n\ def test_${1:case_name}\n\ ${2}\n\ end\n\ snippet if\n\ if ${1:condition}\n\ ${2}\n\ end\n\ snippet ife\n\ if ${1:condition}\n\ ${2}\n\ else\n\ ${3}\n\ end\n\ snippet elsif\n\ elsif ${1:condition}\n\ ${2}\n\ snippet unless\n\ unless ${1:condition}\n\ ${2}\n\ end\n\ snippet while\n\ while ${1:condition}\n\ ${2}\n\ end\n\ snippet for\n\ for ${1:e} in ${2:c}\n\ ${3}\n\ end\n\ snippet until\n\ until ${1:condition}\n\ ${2}\n\ end\n\ snippet cla class .. end\n\ class ${1:`substitute(Filename(), '\\(_\\|^\\)\\(.\\)', '\\u\\2', 'g')`}\n\ ${2}\n\ end\n\ snippet cla class .. initialize .. end\n\ class ${1:`substitute(Filename(), '\\(_\\|^\\)\\(.\\)', '\\u\\2', 'g')`}\n\ def initialize(${2:args})\n\ ${3}\n\ end\n\ end\n\ snippet cla class .. < ParentClass .. initialize .. 
end\n\ class ${1:`substitute(Filename(), '\\(_\\|^\\)\\(.\\)', '\\u\\2', 'g')`} < ${2:ParentClass}\n\ def initialize(${3:args})\n\ ${4}\n\ end\n\ end\n\ snippet cla ClassName = Struct .. do .. end\n\ ${1:`substitute(Filename(), '\\(_\\|^\\)\\(.\\)', '\\u\\2', 'g')`} = Struct.new(:${2:attr_names}) do\n\ def ${3:method_name}\n\ ${4}\n\ end\n\ end\n\ snippet cla class BlankSlate .. initialize .. end\n\ class ${1:BlankSlate}\n\ instance_methods.each { |meth| undef_method(meth) unless meth =~ /\\A__/ }\n\ end\n\ snippet cla class << self .. end\n\ class << ${1:self}\n\ ${2}\n\ end\n\ # class .. < DelegateClass .. initialize .. end\n\ snippet cla-\n\ class ${1:`substitute(Filename(), '\\(_\\|^\\)\\(.\\)', '\\u\\2', 'g')`} < DelegateClass(${2:ParentClass})\n\ def initialize(${3:args})\n\ super(${4:del_obj})\n\ \n\ ${5}\n\ end\n\ end\n\ snippet mod module .. end\n\ module ${1:`substitute(Filename(), '\\(_\\|^\\)\\(.\\)', '\\u\\2', 'g')`}\n\ ${2}\n\ end\n\ snippet mod module .. module_function .. end\n\ module ${1:`substitute(Filename(), '\\(_\\|^\\)\\(.\\)', '\\u\\2', 'g')`}\n\ module_function\n\ \n\ ${2}\n\ end\n\ snippet mod module .. ClassMethods .. 
end\n\ module ${1:`substitute(Filename(), '\\(_\\|^\\)\\(.\\)', '\\u\\2', 'g')`}\n\ module ClassMethods\n\ ${2}\n\ end\n\ \n\ module InstanceMethods\n\ \n\ end\n\ \n\ def self.included(receiver)\n\ receiver.extend ClassMethods\n\ receiver.send :include, InstanceMethods\n\ end\n\ end\n\ # attr_reader\n\ snippet r\n\ attr_reader :${1:attr_names}\n\ # attr_writer\n\ snippet w\n\ attr_writer :${1:attr_names}\n\ # attr_accessor\n\ snippet rw\n\ attr_accessor :${1:attr_names}\n\ snippet atp\n\ attr_protected :${1:attr_names}\n\ snippet ata\n\ attr_accessible :${1:attr_names}\n\ # include Enumerable\n\ snippet Enum\n\ include Enumerable\n\ \n\ def each(&block)\n\ ${1}\n\ end\n\ # include Comparable\n\ snippet Comp\n\ include Comparable\n\ \n\ def <=>(other)\n\ ${1}\n\ end\n\ # extend Forwardable\n\ snippet Forw-\n\ extend Forwardable\n\ # def self\n\ snippet defs\n\ def self.${1:class_method_name}\n\ ${2}\n\ end\n\ # def method_missing\n\ snippet defmm\n\ def method_missing(meth, *args, &blk)\n\ ${1}\n\ end\n\ snippet defd\n\ def_delegator :${1:@del_obj}, :${2:del_meth}, :${3:new_name}\n\ snippet defds\n\ def_delegators :${1:@del_obj}, :${2:del_methods}\n\ snippet am\n\ alias_method :${1:new_name}, :${2:old_name}\n\ snippet app\n\ if __FILE__ == $PROGRAM_NAME\n\ ${1}\n\ end\n\ # usage_if()\n\ snippet usai\n\ if ARGV.${1}\n\ abort \"Usage: #{$PROGRAM_NAME} ${2:ARGS_GO_HERE}\"${3}\n\ end\n\ # usage_unless()\n\ snippet usau\n\ unless ARGV.${1}\n\ abort \"Usage: #{$PROGRAM_NAME} ${2:ARGS_GO_HERE}\"${3}\n\ end\n\ snippet array\n\ Array.new(${1:10}) { |${2:i}| ${3} }\n\ snippet hash\n\ Hash.new { |${1:hash}, ${2:key}| $1[$2] = ${3} }\n\ snippet file File.foreach() { |line| .. }\n\ File.foreach(${1:\"path/to/file\"}) { |${2:line}| ${3} }\n\ snippet file File.read()\n\ File.read(${1:\"path/to/file\"})${2}\n\ snippet Dir Dir.global() { |file| .. 
}\n\ Dir.glob(${1:\"dir/glob/*\"}) { |${2:file}| ${3} }\n\ snippet Dir Dir[\"..\"]\n\ Dir[${1:\"glob/**/*.rb\"}]${2}\n\ snippet dir\n\ Filename.dirname(__FILE__)\n\ snippet deli\n\ delete_if { |${1:e}| ${2} }\n\ snippet fil\n\ fill(${1:range}) { |${2:i}| ${3} }\n\ # flatten_once()\n\ snippet flao\n\ inject(Array.new) { |${1:arr}, ${2:a}| $1.push(*$2)}${3}\n\ snippet zip\n\ zip(${1:enums}) { |${2:row}| ${3} }\n\ # downto(0) { |n| .. }\n\ snippet dow\n\ downto(${1:0}) { |${2:n}| ${3} }\n\ snippet ste\n\ step(${1:2}) { |${2:n}| ${3} }\n\ snippet tim\n\ times { |${1:n}| ${2} }\n\ snippet upt\n\ upto(${1:1.0/0.0}) { |${2:n}| ${3} }\n\ snippet loo\n\ loop { ${1} }\n\ snippet ea\n\ each { |${1:e}| ${2} }\n\ snippet ead\n\ each do |${1:e}|\n\ ${2}\n\ end\n\ snippet eab\n\ each_byte { |${1:byte}| ${2} }\n\ snippet eac- each_char { |chr| .. }\n\ each_char { |${1:chr}| ${2} }\n\ snippet eac- each_cons(..) { |group| .. }\n\ each_cons(${1:2}) { |${2:group}| ${3} }\n\ snippet eai\n\ each_index { |${1:i}| ${2} }\n\ snippet eaid\n\ each_index do |${1:i}|\n\ ${2}\n\ end\n\ snippet eak\n\ each_key { |${1:key}| ${2} }\n\ snippet eakd\n\ each_key do |${1:key}|\n\ ${2}\n\ end\n\ snippet eal\n\ each_line { |${1:line}| ${2} }\n\ snippet eald\n\ each_line do |${1:line}|\n\ ${2}\n\ end\n\ snippet eap\n\ each_pair { |${1:name}, ${2:val}| ${3} }\n\ snippet eapd\n\ each_pair do |${1:name}, ${2:val}|\n\ ${3}\n\ end\n\ snippet eas-\n\ each_slice(${1:2}) { |${2:group}| ${3} }\n\ snippet easd-\n\ each_slice(${1:2}) do |${2:group}|\n\ ${3}\n\ end\n\ snippet eav\n\ each_value { |${1:val}| ${2} }\n\ snippet eavd\n\ each_value do |${1:val}|\n\ ${2}\n\ end\n\ snippet eawi\n\ each_with_index { |${1:e}, ${2:i}| ${3} }\n\ snippet eawid\n\ each_with_index do |${1:e},${2:i}|\n\ ${3}\n\ end\n\ snippet reve\n\ reverse_each { |${1:e}| ${2} }\n\ snippet reved\n\ reverse_each do |${1:e}|\n\ ${2}\n\ end\n\ snippet inj\n\ inject(${1:init}) { |${2:mem}, ${3:var}| ${4} }\n\ snippet injd\n\ inject(${1:init}) do 
|${2:mem}, ${3:var}|\n\ ${4}\n\ end\n\ snippet map\n\ map { |${1:e}| ${2} }\n\ snippet mapd\n\ map do |${1:e}|\n\ ${2}\n\ end\n\ snippet mapwi-\n\ enum_with_index.map { |${1:e}, ${2:i}| ${3} }\n\ snippet sor\n\ sort { |a, b| ${1} }\n\ snippet sorb\n\ sort_by { |${1:e}| ${2} }\n\ snippet ran\n\ sort_by { rand }\n\ snippet all\n\ all? { |${1:e}| ${2} }\n\ snippet any\n\ any? { |${1:e}| ${2} }\n\ snippet cl\n\ classify { |${1:e}| ${2} }\n\ snippet col\n\ collect { |${1:e}| ${2} }\n\ snippet cold\n\ collect do |${1:e}|\n\ ${2}\n\ end\n\ snippet det\n\ detect { |${1:e}| ${2} }\n\ snippet detd\n\ detect do |${1:e}|\n\ ${2}\n\ end\n\ snippet fet\n\ fetch(${1:name}) { |${2:key}| ${3} }\n\ snippet fin\n\ find { |${1:e}| ${2} }\n\ snippet find\n\ find do |${1:e}|\n\ ${2}\n\ end\n\ snippet fina\n\ find_all { |${1:e}| ${2} }\n\ snippet finad\n\ find_all do |${1:e}|\n\ ${2}\n\ end\n\ snippet gre\n\ grep(${1:/pattern/}) { |${2:match}| ${3} }\n\ snippet sub\n\ ${1:g}sub(${2:/pattern/}) { |${3:match}| ${4} }\n\ snippet sca\n\ scan(${1:/pattern/}) { |${2:match}| ${3} }\n\ snippet scad\n\ scan(${1:/pattern/}) do |${2:match}|\n\ ${3}\n\ end\n\ snippet max\n\ max { |a, b| ${1} }\n\ snippet min\n\ min { |a, b| ${1} }\n\ snippet par\n\ partition { |${1:e}| ${2} }\n\ snippet pard\n\ partition do |${1:e}|\n\ ${2}\n\ end\n\ snippet rej\n\ reject { |${1:e}| ${2} }\n\ snippet rejd\n\ reject do |${1:e}|\n\ ${2}\n\ end\n\ snippet sel\n\ select { |${1:e}| ${2} }\n\ snippet seld\n\ select do |${1:e}|\n\ ${2}\n\ end\n\ snippet lam\n\ lambda { |${1:args}| ${2} }\n\ snippet doo\n\ do\n\ ${1}\n\ end\n\ snippet dov\n\ do |${1:variable}|\n\ ${2}\n\ end\n\ snippet :\n\ :${1:key} => ${2:\"value\"}${3}\n\ snippet ope\n\ open(${1:\"path/or/url/or/pipe\"}, \"${2:w}\") { |${3:io}| ${4} }\n\ # path_from_here()\n\ snippet fpath\n\ File.join(File.dirname(__FILE__), *%2[${1:rel path here}])${2}\n\ # unix_filter {}\n\ snippet unif\n\ ARGF.each_line${1} do |${2:line}|\n\ ${3}\n\ end\n\ # option_parse {}\n\ 
snippet optp\n\ require \"optparse\"\n\ \n\ options = {${1:default => \"args\"}}\n\ \n\ ARGV.options do |opts|\n\ opts.banner = \"Usage: #{File.basename($PROGRAM_NAME)}\n\ snippet opt\n\ opts.on( \"-${1:o}\", \"--${2:long-option-name}\", ${3:String},\n\ \"${4:Option description.}\") do |${5:opt}|\n\ ${6}\n\ end\n\ snippet tc\n\ require \"test/unit\"\n\ \n\ require \"${1:library_file_name}\"\n\ \n\ class Test${2:$1} < Test::Unit::TestCase\n\ def test_${3:case_name}\n\ ${4}\n\ end\n\ end\n\ snippet ts\n\ require \"test/unit\"\n\ \n\ require \"tc_${1:test_case_file}\"\n\ require \"tc_${2:test_case_file}\"${3}\n\ snippet as\n\ assert ${1:test}, \"${2:Failure message.}\"${3}\n\ snippet ase\n\ assert_equal ${1:expected}, ${2:actual}${3}\n\ snippet asne\n\ assert_not_equal ${1:unexpected}, ${2:actual}${3}\n\ snippet asid\n\ assert_in_delta ${1:expected_float}, ${2:actual_float}, ${3:2 ** -20}${4}\n\ snippet asio\n\ assert_instance_of ${1:ExpectedClass}, ${2:actual_instance}${3}\n\ snippet asko\n\ assert_kind_of ${1:ExpectedKind}, ${2:actual_instance}${3}\n\ snippet asn\n\ assert_nil ${1:instance}${2}\n\ snippet asnn\n\ assert_not_nil ${1:instance}${2}\n\ snippet asm\n\ assert_match /${1:expected_pattern}/, ${2:actual_string}${3}\n\ snippet asnm\n\ assert_no_match /${1:unexpected_pattern}/, ${2:actual_string}${3}\n\ snippet aso\n\ assert_operator ${1:left}, :${2:operator}, ${3:right}${4}\n\ snippet asr\n\ assert_raise ${1:Exception} { ${2} }\n\ snippet asrd\n\ assert_raise ${1:Exception} do\n\ ${2}\n\ end\n\ snippet asnr\n\ assert_nothing_raised ${1:Exception} { ${2} }\n\ snippet asnrd\n\ assert_nothing_raised ${1:Exception} do\n\ ${2}\n\ end\n\ snippet asrt\n\ assert_respond_to ${1:object}, :${2:method}${3}\n\ snippet ass assert_same(..)\n\ assert_same ${1:expected}, ${2:actual}${3}\n\ snippet ass assert_send(..)\n\ assert_send [${1:object}, :${2:message}, ${3:args}]${4}\n\ snippet asns\n\ assert_not_same ${1:unexpected}, ${2:actual}${3}\n\ snippet ast\n\ assert_throws 
:${1:expected} { ${2} }\n\ snippet astd\n\ assert_throws :${1:expected} do\n\ ${2}\n\ end\n\ snippet asnt\n\ assert_nothing_thrown { ${1} }\n\ snippet asntd\n\ assert_nothing_thrown do\n\ ${1}\n\ end\n\ snippet fl\n\ flunk \"${1:Failure message.}\"${2}\n\ # Benchmark.bmbm do .. end\n\ snippet bm-\n\ TESTS = ${1:10_000}\n\ Benchmark.bmbm do |results|\n\ ${2}\n\ end\n\ snippet rep\n\ results.report(\"${1:name}:\") { TESTS.times { ${2} }}\n\ # Marshal.dump(.., file)\n\ snippet Md\n\ File.open(${1:\"path/to/file.dump\"}, \"wb\") { |${2:file}| Marshal.dump(${3:obj}, $2) }${4}\n\ # Mashal.load(obj)\n\ snippet Ml\n\ File.open(${1:\"path/to/file.dump\"}, \"rb\") { |${2:file}| Marshal.load($2) }${3}\n\ # deep_copy(..)\n\ snippet deec\n\ Marshal.load(Marshal.dump(${1:obj_to_copy}))${2}\n\ snippet Pn-\n\ PStore.new(${1:\"file_name.pstore\"})${2}\n\ snippet tra\n\ transaction(${1:true}) { ${2} }\n\ # xmlread(..)\n\ snippet xml-\n\ REXML::Document.new(File.read(${1:\"path/to/file\"}))${2}\n\ # xpath(..) { .. 
}\n\ snippet xpa\n\ elements.each(${1:\"//Xpath\"}) do |${2:node}|\n\ ${3}\n\ end\n\ # class_from_name()\n\ snippet clafn\n\ split(\"::\").inject(Object) { |par, const| par.const_get(const) }\n\ # singleton_class()\n\ snippet sinc\n\ class << self; self end\n\ snippet nam\n\ namespace :${1:`Filename()`} do\n\ ${2}\n\ end\n\ snippet tas\n\ desc \"${1:Task description}\"\n\ task :${2:task_name => [:dependent, :tasks]} do\n\ ${3}\n\ end\n\ # block\n\ snippet b\n\ { |${1:var}| ${2} }\n\ snippet begin\n\ begin\n\ raise 'A test exception.'\n\ rescue Exception => e\n\ puts e.message\n\ puts e.backtrace.inspect\n\ else\n\ # other exception\n\ ensure\n\ # always executed\n\ end\n\ \n\ #debugging\n\ snippet debug\n\ require 'ruby-debug'; debugger; true;\n\ snippet pry\n\ require 'pry'; binding.pry\n\ \n\ #############################################\n\ # Rails snippets - for pure Ruby, see above #\n\ #############################################\n\ snippet art\n\ assert_redirected_to ${1::action => \"${2:index}\"}\n\ snippet artnp\n\ assert_redirected_to ${1:parent}_${2:child}_path(${3:@$1}, ${4:@$2})\n\ snippet artnpp\n\ assert_redirected_to ${1:parent}_${2:child}_path(${3:@$1})\n\ snippet artp\n\ assert_redirected_to ${1:model}_path(${2:@$1})\n\ snippet artpp\n\ assert_redirected_to ${1:model}s_path\n\ snippet asd\n\ assert_difference \"${1:Model}.${2:count}\", $1 do\n\ ${3}\n\ end\n\ snippet asnd\n\ assert_no_difference \"${1:Model}.${2:count}\" do\n\ ${3}\n\ end\n\ snippet asre\n\ assert_response :${1:success}, @response.body${2}\n\ snippet asrj\n\ assert_rjs :${1:replace}, \"${2:dom id}\"\n\ snippet ass assert_select(..)\n\ assert_select '${1:path}', :${2:text} => '${3:inner_html' ${4:do}\n\ snippet bf\n\ before_filter :${1:method}\n\ snippet bt\n\ belongs_to :${1:association}\n\ snippet crw\n\ cattr_accessor :${1:attr_names}\n\ snippet defcreate\n\ def create\n\ @${1:model_class_name} = ${2:ModelClassName}.new(params[:$1])\n\ \n\ respond_to do |wants|\n\ if @$1.save\n\ 
flash[:notice] = '$2 was successfully created.'\n\ wants.html { redirect_to(@$1) }\n\ wants.xml { render :xml => @$1, :status => :created, :location => @$1 }\n\ else\n\ wants.html { render :action => \"new\" }\n\ wants.xml { render :xml => @$1.errors, :status => :unprocessable_entity }\n\ end\n\ end\n\ end${3}\n\ snippet defdestroy\n\ def destroy\n\ @${1:model_class_name} = ${2:ModelClassName}.find(params[:id])\n\ @$1.destroy\n\ \n\ respond_to do |wants|\n\ wants.html { redirect_to($1s_url) }\n\ wants.xml { head :ok }\n\ end\n\ end${3}\n\ snippet defedit\n\ def edit\n\ @${1:model_class_name} = ${2:ModelClassName}.find(params[:id])\n\ end\n\ snippet defindex\n\ def index\n\ @${1:model_class_name} = ${2:ModelClassName}.all\n\ \n\ respond_to do |wants|\n\ wants.html # index.html.erb\n\ wants.xml { render :xml => @$1s }\n\ end\n\ end${3}\n\ snippet defnew\n\ def new\n\ @${1:model_class_name} = ${2:ModelClassName}.new\n\ \n\ respond_to do |wants|\n\ wants.html # new.html.erb\n\ wants.xml { render :xml => @$1 }\n\ end\n\ end${3}\n\ snippet defshow\n\ def show\n\ @${1:model_class_name} = ${2:ModelClassName}.find(params[:id])\n\ \n\ respond_to do |wants|\n\ wants.html # show.html.erb\n\ wants.xml { render :xml => @$1 }\n\ end\n\ end${3}\n\ snippet defupdate\n\ def update\n\ @${1:model_class_name} = ${2:ModelClassName}.find(params[:id])\n\ \n\ respond_to do |wants|\n\ if @$1.update_attributes(params[:$1])\n\ flash[:notice] = '$2 was successfully updated.'\n\ wants.html { redirect_to(@$1) }\n\ wants.xml { head :ok }\n\ else\n\ wants.html { render :action => \"edit\" }\n\ wants.xml { render :xml => @$1.errors, :status => :unprocessable_entity }\n\ end\n\ end\n\ end${3}\n\ snippet flash\n\ flash[:${1:notice}] = \"${2}\"\n\ snippet habtm\n\ has_and_belongs_to_many :${1:object}, :join_table => \"${2:table_name}\", :foreign_key => \"${3}_id\"${4}\n\ snippet hm\n\ has_many :${1:object}\n\ snippet hmd\n\ has_many :${1:other}s, :class_name => \"${2:$1}\", :foreign_key => 
\"${3:$1}_id\", :dependent => :destroy${4}\n\ snippet hmt\n\ has_many :${1:object}, :through => :${2:object}\n\ snippet ho\n\ has_one :${1:object}\n\ snippet i18\n\ I18n.t('${1:type.key}')${2}\n\ snippet ist\n\ <%= image_submit_tag(\"${1:agree.png}\", :id => \"${2:id}\"${3} %>\n\ snippet log\n\ Rails.logger.${1:debug} ${2}\n\ snippet log2\n\ RAILS_DEFAULT_LOGGER.${1:debug} ${2}\n\ snippet logd\n\ logger.debug { \"${1:message}\" }${2}\n\ snippet loge\n\ logger.error { \"${1:message}\" }${2}\n\ snippet logf\n\ logger.fatal { \"${1:message}\" }${2}\n\ snippet logi\n\ logger.info { \"${1:message}\" }${2}\n\ snippet logw\n\ logger.warn { \"${1:message}\" }${2}\n\ snippet mapc\n\ ${1:map}.${2:connect} '${3:controller/:action/:id}'\n\ snippet mapca\n\ ${1:map}.catch_all \"*${2:anything}\", :controller => \"${3:default}\", :action => \"${4:error}\"${5}\n\ snippet mapr\n\ ${1:map}.resource :${2:resource}\n\ snippet maprs\n\ ${1:map}.resources :${2:resource}\n\ snippet mapwo\n\ ${1:map}.with_options :${2:controller} => '${3:thing}' do |$3|\n\ ${4}\n\ end\n\ snippet mbs\n\ before_save :${1:method}\n\ snippet mcht\n\ change_table :${1:table_name} do |t|\n\ ${2}\n\ end\n\ snippet mp\n\ map(&:${1:id})\n\ snippet mrw\n\ mattr_accessor :${1:attr_names}\n\ snippet oa\n\ order(\"${1:field}\")\n\ snippet od\n\ order(\"${1:field} DESC\")\n\ snippet pa\n\ params[:${1:id}]${2}\n\ snippet ra\n\ render :action => \"${1:action}\"\n\ snippet ral\n\ render :action => \"${1:action}\", :layout => \"${2:layoutname}\"\n\ snippet rest\n\ respond_to do |wants|\n\ wants.${1:html} { ${2} }\n\ end\n\ snippet rf\n\ render :file => \"${1:filepath}\"\n\ snippet rfu\n\ render :file => \"${1:filepath}\", :use_full_path => ${2:false}\n\ snippet ri\n\ render :inline => \"${1:<%= 'hello' %>}\"\n\ snippet ril\n\ render :inline => \"${1:<%= 'hello' %>}\", :locals => { ${2::name} => \"${3:value}\"${4} }\n\ snippet rit\n\ render :inline => \"${1:<%= 'hello' %>}\", :type => ${2::rxml}\n\ snippet rjson\n\ render 
:json => ${1:text to render}\n\ snippet rl\n\ render :layout => \"${1:layoutname}\"\n\ snippet rn\n\ render :nothing => ${1:true}\n\ snippet rns\n\ render :nothing => ${1:true}, :status => ${2:401}\n\ snippet rp\n\ render :partial => \"${1:item}\"\n\ snippet rpc\n\ render :partial => \"${1:item}\", :collection => ${2:@$1s}\n\ snippet rpl\n\ render :partial => \"${1:item}\", :locals => { :${2:$1} => ${3:@$1}\n\ snippet rpo\n\ render :partial => \"${1:item}\", :object => ${2:@$1}\n\ snippet rps\n\ render :partial => \"${1:item}\", :status => ${2:500}\n\ snippet rt\n\ render :text => \"${1:text to render}\"\n\ snippet rtl\n\ render :text => \"${1:text to render}\", :layout => \"${2:layoutname}\"\n\ snippet rtlt\n\ render :text => \"${1:text to render}\", :layout => ${2:true}\n\ snippet rts\n\ render :text => \"${1:text to render}\", :status => ${2:401}\n\ snippet ru\n\ render :update do |${1:page}|\n\ $1.${2}\n\ end\n\ snippet rxml\n\ render :xml => ${1:text to render}\n\ snippet sc\n\ scope :${1:name}, :where(:@${2:field} => ${3:value})\n\ snippet sl\n\ scope :${1:name}, lambda do |${2:value}|\n\ where(\"${3:field = ?}\", ${4:bind var})\n\ end\n\ snippet sha1\n\ Digest::SHA1.hexdigest(${1:string})\n\ snippet sweeper\n\ class ${1:ModelClassName}Sweeper < ActionController::Caching::Sweeper\n\ observe $1\n\ \n\ def after_save(${2:model_class_name})\n\ expire_cache($2)\n\ end\n\ \n\ def after_destroy($2)\n\ expire_cache($2)\n\ end\n\ \n\ def expire_cache($2)\n\ expire_page\n\ end\n\ end\n\ snippet tcb\n\ t.boolean :${1:title}\n\ ${2}\n\ snippet tcbi\n\ t.binary :${1:title}, :limit => ${2:2}.megabytes\n\ ${3}\n\ snippet tcd\n\ t.decimal :${1:title}, :precision => ${2:10}, :scale => ${3:2}\n\ ${4}\n\ snippet tcda\n\ t.date :${1:title}\n\ ${2}\n\ snippet tcdt\n\ t.datetime :${1:title}\n\ ${2}\n\ snippet tcf\n\ t.float :${1:title}\n\ ${2}\n\ snippet tch\n\ t.change :${1:name}, :${2:string}, :${3:limit} => ${4:80}\n\ ${5}\n\ snippet tci\n\ t.integer :${1:title}\n\ ${2}\n\ 
snippet tcl\n\ t.integer :lock_version, :null => false, :default => 0\n\ ${1}\n\ snippet tcr\n\ t.references :${1:taggable}, :polymorphic => { :default => '${2:Photo}' }\n\ ${3}\n\ snippet tcs\n\ t.string :${1:title}\n\ ${2}\n\ snippet tct\n\ t.text :${1:title}\n\ ${2}\n\ snippet tcti\n\ t.time :${1:title}\n\ ${2}\n\ snippet tcts\n\ t.timestamp :${1:title}\n\ ${2}\n\ snippet tctss\n\ t.timestamps\n\ ${1}\n\ snippet va\n\ validates_associated :${1:attribute}\n\ snippet vao\n\ validates_acceptance_of :${1:terms}\n\ snippet vc\n\ validates_confirmation_of :${1:attribute}\n\ snippet ve\n\ validates_exclusion_of :${1:attribute}, :in => ${2:%w( mov avi )}\n\ snippet vf\n\ validates_format_of :${1:attribute}, :with => /${2:regex}/\n\ snippet vi\n\ validates_inclusion_of :${1:attribute}, :in => %w(${2: mov avi })\n\ snippet vl\n\ validates_length_of :${1:attribute}, :within => ${2:3}..${3:20}\n\ snippet vn\n\ validates_numericality_of :${1:attribute}\n\ snippet vpo\n\ validates_presence_of :${1:attribute}\n\ snippet vu\n\ validates_uniqueness_of :${1:attribute}\n\ snippet wants\n\ wants.${1:js|xml|html} { ${2} }\n\ snippet wc\n\ where(${1:\"conditions\"}${2:, bind_var})\n\ snippet wh\n\ where(${1:field} => ${2:value})\n\ snippet xdelete\n\ xhr :delete, :${1:destroy}, :id => ${2:1}${3}\n\ snippet xget\n\ xhr :get, :${1:show}, :id => ${2:1}${3}\n\ snippet xpost\n\ xhr :post, :${1:create}, :${2:object} => { ${3} }\n\ snippet xput\n\ xhr :put, :${1:update}, :id => ${2:1}, :${3:object} => { ${4} }${5}\n\ snippet test\n\ test \"should ${1:do something}\" do\n\ ${2}\n\ end\n\ #migrations\n\ snippet mac\n\ add_column :${1:table_name}, :${2:column_name}, :${3:data_type}\n\ snippet mrc\n\ remove_column :${1:table_name}, :${2:column_name}\n\ snippet mrnc\n\ rename_column :${1:table_name}, :${2:old_column_name}, :${3:new_column_name}\n\ snippet mcc\n\ change_column :${1:table}, :${2:column}, :${3:type}\n\ snippet mccc\n\ t.column :${1:title}, :${2:string}\n\ snippet mct\n\ 
create_table :${1:table_name} do |t|\n\ t.column :${2:name}, :${3:type}\n\ end\n\ snippet migration\n\ class ${1:class_name} < ActiveRecord::Migration\n\ def self.up\n\ ${2}\n\ end\n\ \n\ def self.down\n\ end\n\ end\n\ \n\ snippet trc\n\ t.remove :${1:column}\n\ snippet tre\n\ t.rename :${1:old_column_name}, :${2:new_column_name}\n\ ${3}\n\ snippet tref\n\ t.references :${1:model}\n\ \n\ #rspec\n\ snippet it\n\ it \"${1:spec_name}\" do\n\ ${2}\n\ end\n\ snippet itp\n\ it \"${1:spec_name}\"\n\ ${2}\n\ snippet desc\n\ describe ${1:class_name} do\n\ ${2}\n\ end\n\ snippet cont\n\ context \"${1:message}\" do\n\ ${2}\n\ end\n\ snippet bef\n\ before :${1:each} do\n\ ${2}\n\ end\n\ snippet aft\n\ after :${1:each} do\n\ ${2}\n\ end\n\ "; exports.scope = "ruby"; });
gpl-2.0
angrycactus/social-commerce
modules/commerce/modules/cart/src/EventSubscriber/OrderEventSubscriber.php
1270
<?php

namespace Drupal\commerce_cart\EventSubscriber;

use Symfony\Component\EventDispatcher\EventSubscriberInterface;
use Drupal\commerce_cart\CartProviderInterface;
use Drupal\state_machine\Event\WorkflowTransitionEvent;

/**
 * Finalizes carts when their order is placed.
 *
 * Subscribes to the order workflow "place" transition and marks the
 * order's cart as finalized so it is no longer offered as an active cart.
 */
class OrderEventSubscriber implements EventSubscriberInterface {

  /**
   * The cart provider.
   *
   * @var \Drupal\commerce_cart\CartProviderInterface
   */
  protected $cartProvider;

  /**
   * Constructs a new OrderEventSubscriber object.
   *
   * @param \Drupal\commerce_cart\CartProviderInterface $cart_provider
   *   The cart provider.
   */
  public function __construct(CartProviderInterface $cart_provider) {
    $this->cartProvider = $cart_provider;
  }

  /**
   * {@inheritdoc}
   */
  public static function getSubscribedEvents() {
    // Run before the "place" transition is applied so the cart flag is
    // cleared as part of the same order save.
    $events = [
      'commerce_order.place.pre_transition' => 'finalizeCart',
    ];
    return $events;
  }

  /**
   * Finalizes the cart when the order is placed.
   *
   * @param \Drupal\state_machine\Event\WorkflowTransitionEvent $event
   *   The workflow transition event.
   */
  public function finalizeCart(WorkflowTransitionEvent $event) {
    $order = $event->getEntity();
    // Loose comparison on purpose: the "cart" field value is typically the
    // string '1'/'0' rather than a real boolean.
    if ($order->cart->value == TRUE) {
      // FALSE: do not save the order here; it is saved by the transition
      // that triggered this event.
      $this->cartProvider->finalizeCart($order, FALSE);
    }
  }

}
gpl-2.0
md-5/jdk10
test/hotspot/jtreg/vmTestbase/nsk/jvmti/scenarios/hotswap/HS302/hs302t005r/MyClass.java
1452
/*
 * Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package nsk.jvmti.scenarios.hotswap.HS302.hs302t005r;

/**
 * Test fixture for the HS302/t005 JVMTI hotswap scenario.
 *
 * NOTE(review): this lives in the "...t005r" package — presumably the
 * redefined ("r") variant of the class that the scenario swaps in; the
 * unconditional overwrite in setName() looks like the marker that
 * distinguishes it from the original version. Confirm against the test
 * driver before altering structure or behavior.
 */
public class MyClass {
    // Value reported by toString(); null until setName() is called.
    private String name;

    public MyClass() {
        System.out.println(" Constructor..");
    }

    private void setName(String name) {
        this.name = name;
        // Deliberately clobbers the argument with a fixed value.
        this.name="private";
    }

    public String toString() {
        // May return null if setName() was never invoked.
        return name;
    }

    public boolean equals(Object obj) {
        // NOTE(review): throws NullPointerException when name is null
        // (i.e. before setName()); also no matching hashCode() override.
        // Both appear acceptable for this fixture — do not "fix" blindly.
        return name.equals(obj.toString());
    }
}
gpl-2.0
Morerice/piwik
tests/lib/mocha-3.1.2/test/browser-fixtures/tdd.fixture.js
81
'use strict';

/* eslint-env browser */

// Browser test fixture: configure the page-global Mocha instance before the
// fixture's suites load — a 200 ms per-test timeout and the TDD interface
// (suite/test/setup/teardown).
window.mocha.timeout(200)
    .ui('tdd');
gpl-3.0
carlesfernandez/pybombs
mod_pybombs/plex/scanners.py
14034
#
# Copyright 2013 Tim O'Shea
#
# This file is part of PyBOMBS
#
# PyBOMBS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# PyBOMBS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyBOMBS; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#

"""
Python Lexical Analyser

Scanning an input stream
"""
# NOTE(review): this module is Python 2 syntax (print statements).

import errors
from regexps import BOL, EOL, EOF


class Scanner:
    """
    A Scanner is used to read tokens from a stream of characters
    using the token set specified by a Plex.Lexicon.

    Constructor:

      Scanner(lexicon, stream, name = '')

      See the docstring of the __init__ method for details.

    Methods:

      See the docstrings of the individual methods for more
      information.

      read() --> (value, text)
        Reads the next lexical token from the stream.

      position() --> (name, line, col)
        Returns the position of the last token read using the
        read() method.

      begin(state_name)
        Causes scanner to change state.

      produce(value [, text])
        Causes return of a token value to the caller of the
        Scanner.
    """

    # Class-level defaults; per-instance values are set in __init__.
    lexicon = None          # Lexicon
    stream = None           # file-like object
    name = ''
    buffer = ''
    buf_start_pos = 0       # position in input of start of buffer
    next_pos = 0            # position in input of next char to read
    cur_pos = 0             # position in input of current char
    cur_line = 1            # line number of current char
    cur_line_start = 0      # position in input of start of current line
    start_pos = 0           # position in input of start of token
    start_line = 0          # line number of start of token
    start_col = 0           # position in line of start of token
    text = None             # text of last token read
    initial_state = None    # Node
    state_name = ''         # Name of initial state
    queue = None            # list of tokens to be returned
    trace = 0

    def __init__(self, lexicon, stream, name=''):
        """
        Scanner(lexicon, stream, name = '')

        |lexicon| is a Plex.Lexicon instance specifying the lexical tokens
        to be recognised.

        |stream| can be a file object or anything which implements a
        compatible read() method.

        |name| is optional, and may be the name of the file being
        scanned or any other identifying string.
        """
        self.lexicon = lexicon
        self.stream = stream
        self.name = name
        self.queue = []
        self.initial_state = None
        # begin('') selects the lexicon's default (unnamed) initial state.
        self.begin('')
        self.next_pos = 0
        self.cur_pos = 0
        self.cur_line_start = 0
        # cur_char starts as the beginning-of-line sentinel; input_state 1
        # means "reading ordinary characters" (see next_char()).
        self.cur_char = BOL
        self.input_state = 1

    def read(self):
        """
        Read the next lexical token from the stream
        and return a tuple (value, text), where |value| is the value
        associated with the token as specified by the Lexicon, and
        |text| is the actual string read from the stream. Returns
        (None, '') on end of file.
        """
        queue = self.queue
        # Keep scanning until some action produce()s at least one token
        # (actions may return None, meaning "skip this match").
        while not queue:
            self.text, action = self.scan_a_token()
            if action is None:
                # End of file: emit the (None, '') sentinel and give
                # subclasses a hook via eof().
                self.produce(None)
                self.eof()
            else:
                value = action.perform(self, self.text)
                if value is not None:
                    self.produce(value)
        # FIFO: return the oldest queued token.
        result = queue[0]
        del queue[0]
        return result

    def scan_a_token(self):
        """
        Read the next input sequence recognised by the machine
        and return (text, action). Returns ('', None) on end of file.
        """
        self.start_pos = self.cur_pos
        self.start_line = self.cur_line
        self.start_col = self.cur_pos - self.cur_line_start
        # if self.trace:
        #     action = self.run_machine()
        # else:
        #     action = self.run_machine_inlined()
        action = self.run_machine_inlined()
        if action:
            if self.trace:
                print "Scanner: read: Performing", action, "%d:%d" % (
                    self.start_pos, self.cur_pos)
            # Slice the matched text out of the buffer; buffer positions
            # are absolute, so translate by buf_start_pos.
            base = self.buf_start_pos
            text = self.buffer[self.start_pos - base: self.cur_pos - base]
            return (text, action)
        else:
            if self.cur_pos == self.start_pos:
                # Nothing consumed: step over a bare EOL sentinel, then
                # check whether we are actually at end of input.
                if self.cur_char == EOL:
                    self.next_char()
                if not self.cur_char or self.cur_char == EOF:
                    return ('', None)
            # NOTE(review): leftover debugging import; harmless but dead.
            import pdb
            #pdb.set_trace()
            raise errors.UnrecognizedInput(self, self.state_name)

    def run_machine(self):
        """
        Run the machine until no more transitions are possible.
        """
        # NOTE(review): relies on transition()/back_up(), which are
        # commented out below — this non-inlined path is effectively dead.
        self.state = self.initial_state
        self.backup_state = None
        while self.transition():
            pass
        return self.back_up()

    def run_machine_inlined(self):
        """
        Inlined version of run_machine for speed.

        Hoists all hot instance attributes into locals, runs the DFA until
        it blocks, then writes the locals back. backup_state remembers the
        last position at which an action (i.e. an accepting state) was
        seen, so the longest recognisable match can be restored when the
        machine blocks past it.
        """
        state = self.initial_state
        cur_pos = self.cur_pos
        cur_line = self.cur_line
        cur_line_start = self.cur_line_start
        cur_char = self.cur_char
        input_state = self.input_state
        next_pos = self.next_pos
        buffer = self.buffer
        buf_start_pos = self.buf_start_pos
        buf_len = len(buffer)
        backup_state = None
        trace = self.trace
        while 1:
            if trace: #TRACE#
                print "State %d, %d/%d:%s -->" % ( #TRACE#
                    state['number'], input_state, cur_pos, repr(cur_char)) #TRACE#
            # Begin inlined self.save_for_backup()
            #action = state.action #@slow
            action = state['action'] #@fast
            if action:
                backup_state = (
                    action, cur_pos, cur_line, cur_line_start,
                    cur_char, input_state, next_pos)
            # End inlined self.save_for_backup()
            c = cur_char
            #new_state = state.new_state(c) #@slow
            # States are dicts keyed by character; 'else' is the wildcard
            # transition, tried only for a real character (c truthy).
            new_state = state.get(c, -1) #@fast
            if new_state == -1: #@fast
                new_state = c and state.get('else') #@fast
            if new_state:
                if trace: #TRACE#
                    print "State %d" % new_state['number'] #TRACE#
                state = new_state
                # Begin inlined: self.next_char()
                if input_state == 1:
                    cur_pos = next_pos
                    # Begin inlined: c = self.read_char()
                    buf_index = next_pos - buf_start_pos
                    if buf_index < buf_len:
                        c = buffer[buf_index]
                        next_pos = next_pos + 1
                    else:
                        # Refill: drop everything before the current token
                        # start and append the next 4 KiB from the stream.
                        discard = self.start_pos - buf_start_pos
                        data = self.stream.read(0x1000)
                        buffer = self.buffer[discard:] + data
                        self.buffer = buffer
                        buf_start_pos = buf_start_pos + discard
                        self.buf_start_pos = buf_start_pos
                        buf_len = len(buffer)
                        buf_index = buf_index - discard
                        if data:
                            c = buffer[buf_index]
                            next_pos = next_pos + 1
                        else:
                            c = ''
                    # End inlined: c = self.read_char()
                    # input_state machine: 1 = normal chars, 2/3 = the
                    # EOL/'\n'/BOL sequence emitted around a newline,
                    # 4/5 = the EOL/EOF sequence emitted at end of input.
                    if c == '\n':
                        cur_char = EOL
                        input_state = 2
                    elif not c:
                        cur_char = EOL
                        input_state = 4
                    else:
                        cur_char = c
                elif input_state == 2:
                    cur_char = '\n'
                    input_state = 3
                elif input_state == 3:
                    cur_line = cur_line + 1
                    cur_line_start = cur_pos = next_pos
                    cur_char = BOL
                    input_state = 1
                elif input_state == 4:
                    cur_char = EOF
                    input_state = 5
                else: # input_state = 5
                    cur_char = ''
                # End inlined self.next_char()
            else: # not new_state
                if trace: #TRACE#
                    print "blocked" #TRACE#
                # Begin inlined: action = self.back_up()
                # Rewind to the last accepting state, if any.
                if backup_state:
                    (action, cur_pos, cur_line, cur_line_start,
                     cur_char, input_state, next_pos) = backup_state
                else:
                    action = None
                break # while 1
                # End inlined: action = self.back_up()
        # Write the hoisted locals back to the instance.
        self.cur_pos = cur_pos
        self.cur_line = cur_line
        self.cur_line_start = cur_line_start
        self.cur_char = cur_char
        self.input_state = input_state
        self.next_pos = next_pos
        if trace: #TRACE#
            if action: #TRACE#
                print "Doing", action #TRACE#
        return action

    # def transition(self):
    #     self.save_for_backup()
    #     c = self.cur_char
    #     new_state = self.state.new_state(c)
    #     if new_state:
    #         if self.trace:
    #             print "Scanner: read: State %d: %s --> State %d" % (
    #                 self.state.number, repr(c), new_state.number)
    #         self.state = new_state
    #         self.next_char()
    #         return 1
    #     else:
    #         if self.trace:
    #             print "Scanner: read: State %d: %s --> blocked" % (
    #                 self.state.number, repr(c))
    #         return 0

    # def save_for_backup(self):
    #     action = self.state.get_action()
    #     if action:
    #         if self.trace:
    #             print "Scanner: read: Saving backup point at", self.cur_pos
    #         self.backup_state = (
    #             action, self.cur_pos, self.cur_line, self.cur_line_start,
    #             self.cur_char, self.input_state, self.next_pos)

    # def back_up(self):
    #     backup_state = self.backup_state
    #     if backup_state:
    #         (action, self.cur_pos, self.cur_line, self.cur_line_start,
    #          self.cur_char, self.input_state, self.next_pos) = backup_state
    #         if self.trace:
    #             print "Scanner: read: Backing up to", self.cur_pos
    #         return action
    #     else:
    #         return None

    def next_char(self):
        # Non-inlined advance; mirrors the inlined copy inside
        # run_machine_inlined() (keep the two in sync).
        #
        # NOTE(review): the input_state == 1 branch calls self.read_char(),
        # which is commented out below. It appears unreachable in practice
        # because scan_a_token() only calls next_char() while cur_char is
        # EOL (input_state 2 or 4) — confirm before relying on this path.
        input_state = self.input_state
        if self.trace:
            print "Scanner: next:", " " * 20, "[%d] %d" % (
                input_state, self.cur_pos),
        if input_state == 1:
            self.cur_pos = self.next_pos
            c = self.read_char()
            if c == '\n':
                self.cur_char = EOL
                self.input_state = 2
            elif not c:
                self.cur_char = EOL
                self.input_state = 4
            else:
                self.cur_char = c
        elif input_state == 2:
            self.cur_char = '\n'
            self.input_state = 3
        elif input_state == 3:
            self.cur_line = self.cur_line + 1
            self.cur_line_start = self.cur_pos = self.next_pos
            self.cur_char = BOL
            self.input_state = 1
        elif input_state == 4:
            self.cur_char = EOF
            self.input_state = 5
        else: # input_state = 5
            self.cur_char = ''
        if self.trace:
            print "--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char))

    # def read_char(self):
    #     """
    #     Get the next input character, filling the buffer if necessary.
    #     Returns '' at end of file.
    #     """
    #     next_pos = self.next_pos
    #     buf_index = next_pos - self.buf_start_pos
    #     if buf_index == len(self.buffer):
    #         discard = self.start_pos - self.buf_start_pos
    #         data = self.stream.read(0x1000)
    #         self.buffer = self.buffer[discard:] + data
    #         self.buf_start_pos = self.buf_start_pos + discard
    #         buf_index = buf_index - discard
    #         if not data:
    #             return ''
    #     c = self.buffer[buf_index]
    #     self.next_pos = next_pos + 1
    #     return c

    def position(self):
        """
        Return a tuple (name, line, col) representing the location of
        the last token read using the read() method.

        |name| is the name that was provided to the Scanner constructor;
        |line| is the line number in the stream (1-based); |col| is the
        position within the line of the first character of the token
        (0-based).
        """
        return (self.name, self.start_line, self.start_col)

    def begin(self, state_name):
        """Set the current state of the scanner to the named state."""
        self.initial_state = (
            self.lexicon.get_initial_state(state_name))
        self.state_name = state_name

    def produce(self, value, text=None):
        """
        Called from an action procedure, causes |value| to be returned
        as the token value from read(). If |text| is supplied, it is
        returned in place of the scanned text.

        produce() can be called more than once during a single call to an
        action procedure, in which case the tokens are queued up and
        returned one at a time by subsequent calls to read(), until the
        queue is empty, whereupon scanning resumes.
        """
        if text is None:
            text = self.text
        self.queue.append((value, text))

    def eof(self):
        """
        Override this method if you want something to be done at
        end of file.
        """


# For backward compatibility ('yield' is a keyword, so it must be set
# via setattr rather than a normal def):
setattr(Scanner, "yield", Scanner.produce)
gpl-3.0
pheebsco/memoji-webogram
app/js/background.js
489
/*!
 * Webogram v0.5.5 - messaging web application for MTProto
 * https://github.com/zhukov/webogram
 * Copyright (C) 2014 Igor Zhukov <[email protected]>
 * https://github.com/zhukov/webogram/blob/master/LICENSE
 */

// Chrome packaged-app background page: whenever the app is launched,
// create the main chat window (the fixed `id` lets Chrome restore or
// focus an already-open instance instead of stacking duplicates).
function openChatWindow(launchData) {
  var windowOptions = {
    id: 'webogram-chat',
    innerBounds: {
      width: 1000,
      height: 700
    },
    minWidth: 320,
    minHeight: 400,
    frame: 'chrome'
  }

  chrome.app.window.create('../index.html', windowOptions)
}

chrome.app.runtime.onLaunched.addListener(openChatWindow)
gpl-3.0
lynnlyc/for-honeynet-reviewers
CallbackDroid/android-environment/src/libcore/luni/src/test/java/com/google/coretests/CoreTestPrinter.java
4942
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.coretests;

import java.io.PrintStream;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestResult;
import junit.textui.ResultPrinter;

/**
 * A special ResultPrinter implementation that displays additional statistics
 * about the tests that have been executed: per-category counts from
 * {@link CoreTestResult}, memory usage, and total run time. Progress dots
 * are grouped by package, with optional per-class verbosity.
 */
public class CoreTestPrinter extends ResultPrinter {

    /**
     * The last test class we executed.
     */
    private Class<?> fLastClass;

    /**
     * The current output column for dots.
     */
    private int fColumn;

    /**
     * The time it took to execute the tests, in seconds.
     */
    private int fRunTime;

    /**
     * The flags the user specified (see {@code CoreTestSuite}).
     */
    private int fFlags;

    /**
     * Creates a new CoreTestPrinter for the given parameters.
     *
     * @param writer the stream to print to
     * @param flags  option flags, e.g. {@code CoreTestSuite.VERBOSE}
     */
    public CoreTestPrinter(PrintStream writer, int flags) {
        super(writer);
        fFlags = flags;
    }

    @Override
    protected void printHeader(long runTime) {
        // Remember the elapsed time (in seconds) for printFooter().
        fRunTime = (int)(runTime / 1000);

        if (fColumn != 0) {
            getWriter().println();
        }

        getWriter().println();
    }

    @Override
    protected void printFooter(TestResult result) {
        CoreTestResult coreResult = (CoreTestResult)result;

        PrintStream printer = getWriter();

        if (fColumn != 0) {
            printer.println();
        }

        printer.println();
        printer.println("Total tests   : " + coreResult.fTotalTestCount);
        printer.println("Tests run     : " + coreResult.runCount());
        printer.println("Tests ignored : " + coreResult.fIgnoredCount);

        printer.println();
        printer.println("Normal tests  : " + coreResult.fNormalTestCount);
        printer.println("Android-only  : " + coreResult.fAndroidOnlyCount);
        printer.println("Broken tests  : " + coreResult.fBrokenTestCount);
        printer.println("Known failures: " + coreResult.fKnownFailureCount);
        printer.println("Side-effects  : " + coreResult.fSideEffectCount);

        printMemory();

        // Format the elapsed seconds as HH:MM:SS.
        int seconds = fRunTime;
        int hours = seconds / 3600;
        seconds = seconds % 3600;
        int minutes = seconds / 60;
        seconds = seconds % 60;

        String text = String.format("%02d:%02d:%02d", hours, minutes, seconds);

        printer.println();
        printer.println("Time taken    : " + text);

        super.printFooter(result);
    }

    /**
     * Dumps some memory info.
     */
    private void printMemory() {
        PrintStream printer = getWriter();
        Runtime runtime = Runtime.getRuntime();

        long total = runtime.totalMemory();
        long free = runtime.freeMemory();
        long used = total - free;

        printer.println();
        printer.println("Total memory  : " + total);
        printer.println("Used memory   : " + used);
        printer.println("Free memory   : " + free);
    }

    @Override
    public void startTest(Test test) {
        TestCase caze = (TestCase)test;

        // Print a package banner whenever we cross a package boundary.
        if (fLastClass == null ||
                caze.getClass().getPackage() != fLastClass.getPackage()) {

            if (fColumn != 0) {
                getWriter().println();
                fColumn = 0;
            }

            getWriter().println();
            Package pack = caze.getClass().getPackage();
            getWriter().println(pack == null ? "Default package" :
                    pack.getName());
            getWriter().println();

        }

        if ((fFlags & CoreTestSuite.VERBOSE) != 0) {
            // In verbose mode, also announce each new test class.
            if (caze.getClass() != fLastClass) {
                if (fColumn != 0) {
                    getWriter().println();
                    fColumn = 0;
                }

                // getSimpleName() already returns a String; the previous
                // trailing .toString() was redundant.
                String name = caze.getClass().getSimpleName();

                printMemory();
                getWriter().println("Now executing : " + name);
                getWriter().println();
            }
        }

        // One progress dot per test, wrapping after 40 columns.
        getWriter().print(".");
        if (fColumn++ >= 40) {
            getWriter().println();
            fColumn = 0;
        }

        fLastClass = caze.getClass();
    }

    @Override
    public void addError(Test test, Throwable t) {
        // Timeouts get a compact "T" marker instead of the usual "E".
        if (t instanceof CoreTestTimeout) {
            getWriter().print("T");
        } else {
            super.addError(test, t);
        }
    }
}
gpl-3.0
ContinuumIO/cloud9
plugins-server/cloud9.ide.search/search.js
11368
/**
 * Search module for the Cloud9 IDE
 *
 * Builds and runs a grep (optionally piped through perl for replace-all)
 * over the workspace via the vfs spawn API, and parses grep's
 * "file:line:match" output into a result structure for the client.
 *
 * @copyright 2012, Ajax.org B.V.
 * @license GPLv3 <http://www.gnu.org/licenses/gpl.txt>
 */
"use strict";

var Os = require("os");
var Path = require("path");

module.exports = function() {

    // External tool names/paths and the workspace base path; overridable
    // via setEnv().
    this.env = {
        grepCmd: "grep",
        perlCmd: "perl",
        platform: Os.platform(),
        basePath: ""
    };

    // Merge known keys from newEnv into this.env (unknown keys ignored,
    // falsy values ignored).
    this.setEnv = function(newEnv) {
        var self = this;
        Object.keys(this.env).forEach(function(e) {
            if (newEnv[e])
                self.env[e] = newEnv[e];
        });
    };

    /**
     * Run a search. Returns true if a process was started (results are
     * delivered via onData/onExit), false if the options were rejected
     * (path escapes the workspace, or no command could be assembled).
     * Any previously running search is killed first.
     */
    this.exec = function(options, vfs, onData, onExit) {
        var path = options.path;
        if (options.path === null)
            return true;

        options.uri = path;
        options.path = Path.normalize(this.env.basePath + (path ? "/" + path : ""));
        // if the relative path FROM the workspace directory TO the requested path
        // is outside of the workspace directory, the result of Path.relative() will
        // start with '../', which we can trap and use:
        if (Path.relative(this.env.basePath, options.path).indexOf("../") === 0)
            return false;

        var args = this.assembleCommand(options);

        if (!args)
            return false;

        this.options = options;

        var self = this;

        // Only one active search at a time.
        if (this.activeProcess)
            this.activeProcess.kill("SIGKILL");

        vfs.spawn(args.command, {
            args: args,
            cwd: options.path,
            stdoutEncoding: "utf8",
            stderrEncoding: "utf8"
        }, function(err, meta) {
            if (err || !meta.process)
                return onExit(1, err, {
                    count: 0,
                    filecount: 0
                });

            var child = meta.process;
            self.activeProcess = child;

            var stderr = "";
            // prevFile threads the "current file header" state across
            // stdout chunks so a file split over two chunks is not
            // counted twice.
            var prevFile = null;

            var filecount = 0;
            var count = 0;

            child.stdout.on("data", function(data) {
                var msg = self.parseResult(prevFile, options, data);

                count += msg.count;
                filecount += msg.filecount;
                prevFile = msg.prevFile;

                if (msg)
                    onData(msg);
            });

            child.stderr.on("data", function(data) {
                stderr += data;
            });

            child.on("exit", function(code) {
                // NOTE(review): processCount is never initialised in this
                // module, so this decrement yields NaN unless a caller
                // sets it externally — verify against the plugin host.
                self.processCount -= 1;
                onExit(code, stderr, {
                    count: count,
                    filecount: filecount
                });
            });
        });

        return true;
    };

    /**
     * Build the bash command line for the requested search/replace.
     * Returns an args array of the form ["-c", cmd] with args.command set
     * to "bash", or undefined when options.query is empty.
     */
    this.assembleCommand = function(options) {
        var include = "";
        // -s: no error messages; -r: recursive; -n: line numbers;
        // -i added for case-insensitive search; -P (PCRE) everywhere but
        // on darwin, whose grep lacks it.
        var cmd = this.env.grepCmd + " -s -r --color=never --binary-files=without-match -n " +
            (!options.casesensitive ? "-i " : "") +
            (process.platform != "darwin" ? "-P " : "");

        if (options.pattern) {
            // handles grep peculiarities with --include
            if (options.pattern.split(",").length > 1)
                include = "{" + options.pattern + "}";
            else
                include = options.pattern;
        }
        else {
            // Default to the known source-file extensions.
            include = (process.platform != "darwin" ? "\\" : "") + "*{" + PATTERN_EXT + "}";
        }

        if (options.maxresults)
            cmd += "-m " + parseInt(options.maxresults, 10);
        if (options.wholeword)
            cmd += " -w";

        var query = options.query;
        if (!query)
            return;

        // grep has a funny way of handling new lines (that is to say, it's non-existent)
        // if we're not doing a regex search, then we must split everything between the
        // new lines, escape the content, and then smush it back together; due to
        // new lines, this is also why we're now passing -P as default to grep
        if (!options.replaceAll && !options.regexp) {
            var splitQuery = query.split("\\n");

            for (var q in splitQuery) {
                splitQuery[q] = grepEscapeRegExp(splitQuery[q]);
            }
            query = splitQuery.join("\\n");
        }

        query = query.replace(new RegExp("\\\'", "g"), "'\\''"); // ticks must be double escaped for BSD grep

        cmd += " --exclude=*{" + PATTERN_EDIR + "}*" +
            " --include=" + include +
            " '" + query.replace(/-/g, "\\-") + "'" +
            " \"" + escapeShell(options.path) + "\"";

        if (options.replaceAll) {
            if (!options.replacement)
                options.replacement = "";

            if (options.regexp)
                query = escapeRegExp(query);

            // pipe the grep results into perl
            cmd += " -l | xargs " + this.env.perlCmd +
                // print the grep result to STDOUT (to arrange in parseSearchResult())
                " -pi -e 'print STDOUT \"$ARGV:$.:$_\"" +
                // do the actual replace
                " if s/" + query + "/" + options.replacement + "/mg" + ( options.casesensitive ? "" : "i" ) + ";'";
        }

        var args = ["-c", cmd];
        args.command = "bash";

        return args;
    };

    /**
     * Parse a chunk of grep output ("path:lineno:match" lines) into
     * { count, filecount, prevFile, data }, where data groups matches
     * under a "file:" header and prevFile carries the last seen file name
     * into the next chunk.
     */
    this.parseResult = function(prevFile, options, data) {
        if (typeof data !== "string" || data.indexOf("\n") === -1)
            return { count: 0, filecount: 0, data: "" };

        var parts, file, lineno, result = "";
        var aLines = data.split(/([\n\r]+)/g);
        var count = 0;
        var filecount = 0;

        if (options) {
            for (var i = 0, l = aLines.length; i < l; ++i) {
                parts = aLines[i].split(":");

                // Need at least path, line number and some match text.
                if (parts.length < 3)
                    continue;

                // Strip the absolute workspace prefix back off the path.
                var _path = parts.shift().replace(options.path, "").trimRight();
                file = encodeURI(options.uri + _path, "/");

                lineno = parseInt(parts.shift(), 10);
                if (!lineno)
                    continue;

                ++count;
                if (file !== prevFile) {
                    ++filecount;
                    if (prevFile)
                        result += "\n \n";

                    result += file + ":";
                    prevFile = file;
                }

                // Remaining parts may themselves contain ':', so rejoin.
                result += "\n\t" + lineno + ": " + parts.join(":");
            }
        }
        else {
            console.error("options object doesn't exist", data);
        }

        return {
            count: count,
            filecount: filecount,
            prevFile: prevFile,
            data: result
        };
    };

    // util

    // Return arr with duplicates removed, in place (same array object).
    var makeUnique = function(arr){
        var i, length, newArr = [];
        for (i = 0, length = arr.length; i < length; i++) {
            if (newArr.indexOf(arr[i]) == -1)
                newArr.push(arr[i]);
        }

        arr.length = 0;
        for (i = 0, length = newArr.length; i < length; i++)
            arr.push(newArr[i]);

        return arr;
    };

    // Escape regex metacharacters for use inside a regex.
    var escapeRegExp = function(str) {
        return str.replace(/([.*+?\^${}()|\[\]\/\\])/g, "\\$1");
    };

    // taken from http://xregexp.com/
    var grepEscapeRegExp = function(str) {
        return str.replace(/[[\]{}()*+?.,\\^$|#\s"']/g, "\\$&");
    };

    // Backslash-escape characters that are special to the shell.
    var escapeShell = function(str) {
        return str.replace(/([\\"'`$\s\(\)<>])/g, "\\$1");
    };

    // file types

    // VCS/build metadata directories that are excluded from every search.
    var IGNORE_DIRS = {
        ".bzr"              : "Bazaar",
        ".cdv"              : "Codeville",
        "~.dep"             : "Interface Builder",
        "~.dot"             : "Interface Builder",
        "~.nib"             : "Interface Builder",
        "~.plst"            : "Interface Builder",
        ".git"              : "Git",
        ".hg"               : "Mercurial",
        ".pc"               : "quilt",
        ".svn"              : "Subversion",
        "_MTN"              : "Monotone",
        "blib"              : "Perl module building",
        "CVS"               : "CVS",
        "RCS"               : "RCS",
        "SCCS"              : "SCCS",
        "_darcs"            : "darcs",
        "_sgbak"            : "Vault/Fortress",
        "autom4te.cache"    : "autoconf",
        "cover_db"          : "Devel::Cover",
        "_build"            : "Module::Build"
    };

    // Language name -> file extensions; the union of all values feeds the
    // default --include pattern.
    var MAPPINGS = {
        "actionscript": ["as", "mxml"],
        "ada"       : ["ada", "adb", "ads"],
        "asm"       : ["asm", "s"],
        "batch"     : ["bat", "cmd"],
        //"binary"  : q{Binary files, as defined by Perl's -B op (default: off)},
        "cc"        : ["c", "h", "xs"],
        "cfmx"      : ["cfc", "cfm", "cfml"],
        "clojure"   : ["clj"],
        "cpp"       : ["cpp", "cc", "cxx", "m", "hpp", "hh", "h", "hxx"],
        "csharp"    : ["cs"],
        "css"       : ["css", "less", "scss", "sass"],
        "coffee"    : ["coffee"],
        "elisp"     : ["el"],
        "erlang"    : ["erl", "hrl"],
        "fortran"   : ["f", "f77", "f90", "f95", "f03", "for", "ftn", "fpp"],
        "haskell"   : ["hs", "lhs"],
        "hh"        : ["h"],
        "html"      : ["htm", "html", "shtml", "xhtml"],
        "jade"      : ["jade"],
        "java"      : ["java", "properties"],
        "groovy"    : ["groovy"],
        "js"        : ["js"],
        "json"      : ["json"],
        "latex"     : ["latex", "ltx"],
        "jsp"       : ["jsp", "jspx", "jhtm", "jhtml"],
        "lisp"      : ["lisp", "lsp"],
        "logiql"    : ["logic", "lql"],
        "lua"       : ["lua"],
        "make"      : ["makefile", "Makefile"],
        "mason"     : ["mas", "mhtml", "mpl", "mtxt"],
        "markdown"  : ["md", "markdown"],
        "objc"      : ["m", "h"],
        "objcpp"    : ["mm", "h"],
        "ocaml"     : ["ml", "mli"],
        "parrot"    : ["pir", "pasm", "pmc", "ops", "pod", "pg", "tg"],
        "perl"      : ["pl", "pm", "pod", "t"],
        "php"       : ["php", "phpt", "php3", "php4", "php5", "phtml"],
        "plone"     : ["pt", "cpt", "metadata", "cpy", "py"],
        "powershell": ["ps1"],
        "python"    : ["py"],
        "rake"      : ["rakefile"],
        "ruby"      : ["rb", "ru", "rhtml", "rjs", "rxml", "erb", "rake", "gemspec"],
        "scala"     : ["scala"],
        "scheme"    : ["scm", "ss"],
        "shell"     : ["sh", "bash", "csh", "tcsh", "ksh", "zsh"],
        //"skipped" : "q"{"Files but not directories normally skipped by ack ("default": "off")},
        "smalltalk" : ["st"],
        "sql"       : ["sql", "ctl"],
        "tcl"       : ["tcl", "itcl", "itk"],
        "tex"       : ["tex", "cls", "sty"],
        "text"      : ["txt"],
        "textile"   : ["textile"],
        "tt"        : ["tt", "tt2", "ttml"],
        "vb"        : ["bas", "cls", "frm", "ctl", "vb", "resx"],
        "vim"       : ["vim"],
        "yaml"      : ["yaml", "yml"],
        "xml"       : ["xml", "dtd", "xslt", "ent", "rdf", "rss", "svg", "wsdl", "atom", "mathml", "mml"]
    };

    var exts = [];
    for (var type in MAPPINGS) {
        exts = exts.concat(MAPPINGS[type]);
    }
    // grep pattern matching for extensions
    var PATTERN_EXT = makeUnique(exts).join(",");
    var dirs = [];
    for (type in IGNORE_DIRS) {
        dirs.push(type);
    }
    dirs = makeUnique(dirs);
    // NOTE(review): PATTERN_DIR is computed but never referenced in this
    // file — possibly kept for external consumers; confirm before removal.
    var PATTERN_DIR = escapeRegExp(dirs.join("|"));
    var PATTERN_EDIR = dirs.join(",");
};
gpl-3.0
stahta01/codeblocks-svn2git
src/plugins/contrib/NassiShneiderman/InsertBrickTask.cpp
5166
#include "InsertBrickTask.h"
#include "GraphBricks.h"
#include "RedLineDrawlet.h"
#include "RedHatchDrawlet.h"
#include "NassiFileContent.h"
#include "commands.h"
#include "NassiView.h"
#include <wx/mstream.h>

// Cursor bitmaps; the "_inv" variants are selected for wxGTK — presumably
// inverted colors are needed on that toolkit (TODO confirm).
#if defined(__WXGTK__)
#include "rc/ifcur_inv.xpm"
#include "rc/whilecur_inv.xpm"
#include "rc/dowhilecur_inv.xpm"
#include "rc/instrcur_inv.xpm"
#include "rc/forcur_inv.xpm"
#include "rc/switchcur_inv.xpm"
#include "rc/breakcur_inv.xpm"
#include "rc/continuecur_inv.xpm"
#include "rc/returncur_inv.xpm"
#include "rc/blockcur_inv.xpm"
#else
#include "rc/ifcur.xpm"
#include "rc/whilecur.xpm"
#include "rc/dowhilecur.xpm"
#include "rc/instrcur.xpm"
#include "rc/forcur.xpm"
#include "rc/switchcur.xpm"
#include "rc/breakcur.xpm"
#include "rc/continuecur.xpm"
#include "rc/returncur.xpm"
#include "rc/blockcur.xpm"
#endif

// Task that inserts a new brick of the selected tool type into the Nassi
// diagram on each left click, until cancelled (right click or ESC).
InsertBrickTask::InsertBrickTask(NassiView *view, NassiFileContent *nfc, NassiView::NassiTools tool):
    Task(),
    m_view(view),
    m_nfc(nfc),
    m_done(false),
    m_tool(tool)
{}

InsertBrickTask::~InsertBrickTask() {}

// Return the mouse cursor that matches the active tool.
wxCursor InsertBrickTask::Start()
{
    switch ( m_tool )
    {
        case NassiView::NASSI_TOOL_CONTINUE:
            return wxCursor(continuecur_xpm);
        case NassiView::NASSI_TOOL_BREAK:
            return wxCursor(breakcur_xpm);
        case NassiView::NASSI_TOOL_RETURN:
            return wxCursor(returncur_xpm);
        case NassiView::NASSI_TOOL_WHILE:
            return wxCursor(whilecur_xpm);
        case NassiView::NASSI_TOOL_DOWHILE:
            return wxCursor(dowhilecur_xpm);
        case NassiView::NASSI_TOOL_FOR:
            return wxCursor(forcur_xpm);
        case NassiView::NASSI_TOOL_BLOCK:
            return wxCursor(blockcur_xpm);
        case NassiView::NASSI_TOOL_IF:
            // NOTE(review): only this cursor is built via wxImage —
            // presumably deliberate for this bitmap; confirm before
            // normalizing.
            return wxCursor(wxImage(ifcur_xpm));
        case NassiView::NASSI_TOOL_SWITCH:
            return wxCursor(switchcur_xpm);
        // Non-insert tools intentionally fall through to the default
        // instruction cursor.
        default:
        case NassiView::NASSI_TOOL_ESC:
        case NassiView::NASSI_TOOL_SELECT:
        case NassiView::NASSI_TOOL_PASTE:
        case NassiView::NASSI_TOOL_INSTRUCTION:
            return wxCursor(instrcur_xpm);
    }
}

// True once the task has been cancelled (right click or ESC).
bool InsertBrickTask::Done()const
{
    return m_done;
}

// events from window:
void InsertBrickTask::OnMouseLeftUp(wxMouseEvent & /*event*/, const wxPoint & /*position*/){}

// Insert a new brick at the clicked location. All mutations go through the
// command processor so they are undoable.
void InsertBrickTask::OnMouseLeftDown(wxMouseEvent & /*event*/, const wxPoint &position)
{
    if ( !m_nfc->GetFirstBrick() )
    {
        // Empty diagram: a click inside the root placeholder creates the
        // very first brick.
        wxRect rect = m_view->GetEmptyRootRect();
        if ( rect.Contains(position) )
            m_nfc->GetCommandProcessor()->Submit(
                new NassiInsertFirstBrick(m_nfc, m_view->GenerateNewBrick(m_tool)) );
        return;
    }

    GraphNassiBrick *gbrick = m_view->GetBrickAtPosition(position);
    if ( gbrick )
    {
        // Where on the brick was clicked decides the insert command:
        // above, below, into a child slot, or onto a child indicator.
        GraphNassiBrick::Position p = gbrick->GetPosition(position);
        if ( p.pos == GraphNassiBrick::Position::bottom )
            m_nfc->GetCommandProcessor()->Submit(
                new NassiInsertBrickAfter( m_nfc, gbrick->GetBrick(), m_view->GenerateNewBrick(m_tool) ));
        else if ( p.pos == GraphNassiBrick::Position::top )
            m_nfc->GetCommandProcessor()->Submit(
                new NassiInsertBrickBefore(m_nfc, gbrick->GetBrick(), m_view->GenerateNewBrick(m_tool) ));
        else if ( p.pos == GraphNassiBrick::Position::child )
            m_nfc->GetCommandProcessor()->Submit(
                new NassiInsertChildBrickCommand(m_nfc, gbrick->GetBrick(), m_view->GenerateNewBrick(m_tool), p.number));
        else if ( p.pos == GraphNassiBrick::Position::childindicator )
            m_nfc->GetCommandProcessor()->Submit(
                new NassiAddChildIndicatorCommand(m_nfc, gbrick->GetBrick(), m_view->GenerateNewBrick(m_tool), p.number));
    }
}

// Right click cancels the insert task.
void InsertBrickTask::OnMouseRightDown(wxMouseEvent & /*event*/, const wxPoint & /*position*/)
{
    m_done = true;
}

void InsertBrickTask::OnMouseRightUp(wxMouseEvent& /*event*/, const wxPoint & /*position*/){}

// Provide hover feedback: a hatch over the empty root, or whatever drawlet
// the hovered brick offers for the given position. May return 0 (no
// feedback); caller takes ownership of the returned drawlet.
HooverDrawlet *InsertBrickTask::OnMouseMove(wxMouseEvent & /*event*/, const wxPoint &position)
{
    if ( !m_nfc->GetFirstBrick() )
    {
        wxRect rect = m_view->GetEmptyRootRect();
        if ( rect.Contains(position) )
            return new RedHatchDrawlet(rect);
        else
            return 0;
    }

    GraphNassiBrick *gbrick = m_view->GetBrickAtPosition(position);
    if ( gbrick )
        return gbrick->GetDrawlet(position, false);
    return 0;
}

// ESC cancels the insert task; all other keys are ignored.
void InsertBrickTask::OnKeyDown(wxKeyEvent &event)
{
    if ( event.GetKeyCode() == WXK_ESCAPE )
    {
        m_done = true;
        return;
    }
    //event.Skip();
}

void InsertBrickTask::OnChar(wxKeyEvent & /*event*/){}

// events from frame(s)
// While inserting, editing/clipboard operations are unavailable.
bool InsertBrickTask::CanEdit()const{ return false; }
//bool InsertBrickTask::CanCopy()const{ return false; }
//bool InsertBrickTask::CanCut()const{ return false; }
bool InsertBrickTask::CanPaste()const{ return false; }
bool InsertBrickTask::HasSelection()const{ return false; }
void InsertBrickTask::DeleteSelection(){}
void InsertBrickTask::Copy(){}
void InsertBrickTask::Cut(){}
void InsertBrickTask::Paste(){}
gpl-3.0
ramdesh/moodle
enrol/editinstance_form.php
2628
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle.  If not, see <http://www.gnu.org/licenses/>.

/**
 * Adds new instance of enrol_plugin to specified course or edits current instance.
 *
 * @package core_enrol
 * @copyright 2015 Damyon Wiese
 * @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */

defined('MOODLE_INTERNAL') || die();

require_once($CFG->libdir.'/formslib.php');

/**
 * Standard edit form shared by all enrol plugins.
 *
 * The form delegates its plugin-specific fields and validation to the
 * enrol plugin supplied via customdata.
 *
 * @package core_enrol
 * @copyright 2015 Damyon Wiese
 * @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */
class enrol_instance_edit_form extends moodleform {

    /**
     * Called to define this moodle form.
     *
     * Renders a header for the plugin, lets the plugin add its own fields,
     * then appends the hidden id/courseid/type fields and the action buttons.
     *
     * @return void
     */
    public function definition() {
        global $DB;

        $mform = $this->_form;
        [$instance, $plugin, $context, $type] = $this->_customdata;

        $mform->addElement('header', 'header', get_string('pluginname', 'enrol_' . $type));

        // The specific enrol plugin contributes its own settings fields.
        $plugin->edit_instance_form($instance, $mform, $context);

        // Hidden fields carried through on submission.
        foreach (['id' => PARAM_INT, 'courseid' => PARAM_INT, 'type' => PARAM_COMPONENT] as $fieldname => $paramtype) {
            $mform->addElement('hidden', $fieldname);
            $mform->setType($fieldname, $paramtype);
        }

        $instance->type = $type;

        // New instances get an "Add instance" submit label; edits use the default.
        $this->add_action_buttons(true, ($instance->id ? null : get_string('addinstance', 'enrol')));

        $this->set_data($instance);
    }

    /**
     * Validate this form. Calls plugin validation method.
     *
     * @param array $data
     * @param array $files
     * @return array of error messages, keyed by field name
     */
    public function validation($data, $files) {
        $errors = parent::validation($data, $files);

        [$instance, $plugin, $context, $type] = $this->_customdata;

        // Merge in the plugin-specific validation results.
        return array_merge($errors, $plugin->edit_instance_validation($data, $files, $instance, $context));
    }
}
gpl-3.0
arjitc/librenms
app/Http/Controllers/Widgets/DeviceSummaryVertController.php
1231
<?php /** * DeviceSummaryVertController.php * * -Description- * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * @package LibreNMS * @link http://librenms.org * @copyright 2018 Tony Murray * @author Tony Murray <[email protected]> */ namespace App\Http\Controllers\Widgets; use Illuminate\Http\Request; use Illuminate\View\View; class DeviceSummaryVertController extends DeviceSummaryController { /** * @param Request $request * @return View */ public function getView(Request $request) { return view('widgets.device-summary-vert', $this->getData($request)); } }
gpl-3.0
PatchRanger/mautic
plugins/MauticSocialBundle/Views/Integration/LinkedIn/share.html.php
702
<?php /* * @copyright 2014 Mautic Contributors. All rights reserved * @author Mautic * * @link http://mautic.org * * @license GNU/GPLv3 http://www.gnu.org/licenses/gpl-3.0.html */ $locale = $app->getRequest()->getLocale(); $js = <<<js <script src="//platform.linkedin.com/in.js" type="text/javascript"> lang: $locale </script> js; $counter = (!empty($settings['counter'])) ? $settings['counter'] : 'none'; $dataCounter = ($counter != 'none') ? ' data-counter="'.$settings['counter'].'"' : ''; ?> <div class="share-button linkedin-share-button layout-<?php echo $counter; ?>"> <script type="IN/Share"<?php echo $dataCounter; ?>></script> </div> <?php echo $js; ?>
gpl-3.0
bbockelm/xrootd_old_git
src/XrdOfs/XrdOfsSecurity.hh
3061
#ifndef ___XrdOfsSECURITY_H___
#define ___XrdOfsSECURITY_H___
/******************************************************************************/
/*                                                                            */
/*                   X r d O f s S e c u r i t y . h h                        */
/*                                                                            */
/* (C) 2003 by the Board of Trustees of the Leland Stanford, Jr., University  */
/*                            All Rights Reserved                             */
/* Produced by Andrew Hanushevsky for Stanford University under contract      */
/*              DE-AC02-76-SFO0515 with the Deprtment of Energy               */
/*                                                                            */
/* This file is part of the XRootD software suite.                            */
/*                                                                            */
/* XRootD is free software: you can redistribute it and/or modify it under    */
/* the terms of the GNU Lesser General Public License as published by the     */
/* Free Software Foundation, either version 3 of the License, or (at your     */
/* option) any later version.                                                 */
/*                                                                            */
/* XRootD is distributed in the hope that it will be useful, but WITHOUT      */
/* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or      */
/* FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public       */
/* License for more details.                                                  */
/*                                                                            */
/* You should have received a copy of the GNU Lesser General Public License   */
/* along with XRootD in a file called COPYING.LESSER (LGPL license) and file  */
/* COPYING (GPL license).  If not, see <http://www.gnu.org/licenses/>.        */
/*                                                                            */
/* The copyright holder's institutional names and contributor's names may not */
/* be used to endorse or promote products derived from this software without  */
/* specific prior written permission of the institution or contributor.       */
/******************************************************************************/

#include "XrdAcc/XrdAccAuthorize.hh"

// Authorization check helper. If a client identity 'usr' is present and an
// Authorization object is configured on XrdOfsFS, ask it whether 'usr' may
// perform 'optype' on 'pathp'. On denial, log an EACCES error through
// Emsg(epname, ...) and make the ENCLOSING function return SFS_ERROR.
// NOTE: expands to a plain 'if' statement — 'epname' must be in scope at the
// expansion site, and the macro causes an early return on failure.
#define AUTHORIZE(usr, env, optype, action, pathp, edata) \
    if (usr && XrdOfsFS->Authorization \
    &&  !XrdOfsFS->Authorization->Access(usr, pathp, optype, env)) \
       {XrdOfsFS->Emsg(epname, edata, EACCES, action, pathp); return SFS_ERROR;}

// Two-path variant (e.g. rename/link): both (optype, path) checks must pass;
// either failing triggers the same log-and-return behaviour as AUTHORIZE.
#define AUTHORIZE2(usr,edata,opt1,act1,path1,env1,opt2,act2,path2,env2) \
       {AUTHORIZE(usr, env1, opt1, act1, path1, edata); \
        AUTHORIZE(usr, env2, opt2, act2, path2, edata); \
       }

// Copies the client's name and host (when present) into the given
// environment under SEC_USER / SEC_HOST.
#define OOIDENTENV(usr, env) \
    if (usr) {if (usr->name) env.Put(SEC_USER,  usr->name); \
              if (usr->host) env.Put(SEC_HOST,  usr->host);}

#endif
gpl-3.0
vaibhavgupta3110/openemr
library/classes/ClinicalTypes/CareGoal.php
706
<?php // Copyright (C) 2011 Ken Chapple <[email protected]> // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // require_once('ClinicalType.php'); class CareGoal extends ClinicalType { const FOLLOW_UP_PLAN_BMI_MGMT = 'flwup_bmi_mgmt'; public function getListId() { return 'Clinical_Rules_Care_Goal_Types'; } public function doPatientCheck(RsPatient $patient, $beginMeasurement = null, $endMeasurement = null, $options = null) { return true; } }
gpl-3.0
pixelipo/server
apps/systemtags/l10n/et_EE.js
455
OC.L10N.register( "systemtags", { "Tags" : "Sildid", "Tagged files" : "Sildistatud failid", "Select tags to filter by" : "Vali sildid, mille järgi filtreerida", "%s (invisible)" : "%s (nähtamatu)", "No files in here" : "Siin ei ole faile", "No entries found in this folder" : "Selles kaustast ei leitud kirjeid", "Name" : "Nimi", "Size" : "Suurus", "Modified" : "Muudetud" }, "nplurals=2; plural=(n != 1);");
agpl-3.0
exercitussolus/yolo
src/main/java/org/elasticsearch/common/metrics/MeanMetric.java
1576
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.metrics; import jsr166e.LongAdder; /** */ public class MeanMetric implements Metric { private final LongAdder counter = new LongAdder(); private final LongAdder sum = new LongAdder(); public void inc(long n) { counter.increment(); sum.add(n); } public void dec(long n) { counter.decrement(); sum.add(-n); } public long count() { return counter.sum(); } public long sum() { return sum.sum(); } public double mean() { long count = count(); if (count > 0) { return sum.sum() / (double) count; } return 0.0; } public void clear() { counter.reset(); sum.reset(); } }
agpl-3.0
dmeltzer/snipe-it
resources/lang/af/admin/asset_maintenances/message.php
1057
<?php return [ 'not_found' => 'Bate-instandhouding waarna jy gesoek is, is nie gevind nie!', 'delete' => [ 'confirm' => 'Is jy seker jy wil hierdie bate instandhouding uitvee?', 'error' => 'Daar was \'n probleem met die verwydering van die bate-instandhouding. Probeer asseblief weer.', 'success' => 'Die bate-instandhouding is suksesvol verwyder.' ], 'create' => [ 'error' => 'Asset Maintenance is nie geskep nie, probeer asseblief weer.', 'success' => 'Bate Onderhoud geskep suksesvol.' ], 'edit' => [ 'error' => 'Bateonderhoud is nie geredigeer nie, probeer asseblief weer.', 'success' => 'Bate Onderhoud suksesvol geredigeer.' ], 'asset_maintenance_incomplete' => 'Nog nie voltooi nie', 'warranty' => 'waarborg', 'not_warranty' => 'Nie waarborg nie', ];
agpl-3.0
akretion/stock-logistics-workflow
product_unique_serial/__init__.py
983
# -*- coding: utf-8 -*- ############################################################################## # # Copyright 2015 Vauxoo # Author: Moisés Lopez, Osval Reyes # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import models from . import wizards
agpl-3.0
lnu/nhibernate-core
src/NHibernate.Test/NHSpecificTest/NH2470/Class1Class2DTOs.cs
185
namespace NHibernate.Test.NHSpecificTest.NH2470 { public class Class2DTO : DTO { } public class Class1DTO : DTO { public Class2DTO[] Class2Ary { get; set; } } }
lgpl-2.1
vlee489/Launcher
creator-tools/src/main/java/com/skcraft/launcher/creator/controller/task/PackLoader.java
1365
/* * SK's Minecraft Launcher * Copyright (C) 2010-2014 Albert Pham <http://www.sk89q.com> and contributors * Please see LICENSE.txt for license information. */ package com.skcraft.launcher.creator.controller.task; import com.google.common.base.Function; import com.skcraft.concurrency.ProgressObservable; import com.skcraft.launcher.creator.model.creator.Pack; import com.skcraft.launcher.creator.model.creator.Workspace; import java.util.List; public class PackLoader implements ProgressObservable, Function<Workspace, List<Pack>> { private int index; private int size = 0; private Pack lastPack; @Override public List<Pack> apply(Workspace workspace) { List<Pack> packs = workspace.getPacks(); size = packs.size(); for (Pack pack : packs) { lastPack = pack; pack.load(); index++; } lastPack = null; return packs; } @Override public double getProgress() { if (size == 0) { return -1; } else { return index / (double) size; } } @Override public String getStatus() { Pack pack = lastPack; if (pack != null) { return "Loading " + pack.getDirectory().getName() + "..."; } else { return "Enumerating packs..."; } } }
lgpl-3.0
webOS-ports/org.webosports.app.calculator
lib/layout/tree/source/design.js
587
/** Description to make Tree kind available in Ares. */ Palette.model.push( {name: "Tree", items: [ {name: "Tree", title: "Selectable sub-view", icon: "package_new.png", stars: 4.5, version: 2.0, blurb: "A component for Trees", inline: {kind: "FittableColumns", style: "height: 40px; position: relative;", padding: 4, components: [ {style: "background-color: lightgreen; border: 1px dotted green; width: 20px;"}, {style: "background-color: lightblue; border: 1px dotted blue;", fit: true}, ]}, config: {content: "$name", isContainer: true, kind: "Node"} } ]} );
apache-2.0
GillesMoris/OSS
src/org/zaproxy/zap/utils/Statistics.java
2594
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2016 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.utils; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; public class Statistics { private Map<String, Long> stats = new HashMap<String, Long>(); public void incCounter(String key) { long value = 1; if (stats.containsKey(key)) { value = stats.get(key); } stats.put(key, value+1); } public void incCounter(String key, long inc) { long value = 1; if (stats.containsKey(key)) { value = stats.get(key); } stats.put(key, value+inc); } public void decCounter(String key) { long value = 1; if (stats.containsKey(key)) { value = stats.get(key); } stats.put(key, value-1); } public void decCounter(String key, long dec) { long value = 1; if (stats.containsKey(key)) { value = stats.get(key); } stats.put(key, value-dec); } public void setHighwaterMark(String key, long value) { Long curValue = stats.get(key); if (curValue == null || value > curValue) { stats.put(key, value+1); } } public void setLowwaterMark(String key, long value) { Long curValue = stats.get(key); if (curValue == null || value < curValue) { stats.put(key, value+1); } } public Long getStat(String key) { return stats.get(key); } public Map<String, Long> getStats(String keyPrefix) { Map<String, Long> map = new HashMap<String, Long>(); for 
(Entry<String, Long> stat: stats.entrySet()) { if (stat.getKey().startsWith(keyPrefix)) { map.put(stat.getKey(), stat.getValue()); } } return map; } public void clearAll() { stats.clear(); } public void clear(String keyPrefix) { Iterator<Entry<String, Long>> iter = stats.entrySet().iterator(); while (iter.hasNext()) { Entry<String, Long> entry = iter.next(); if (entry.getKey().startsWith(keyPrefix)) { iter.remove(); } } } }
apache-2.0
ern/elasticsearch
modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
55601
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.percolator; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.geo.GeoPlugin; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static 
org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery; import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; import static org.elasticsearch.index.query.QueryBuilders.geoPolygonQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.IsNull.notNullValue; public class PercolatorQuerySearchIT extends ESIntegTestCase { @Override protected boolean addMockGeoShapeFieldMapper() { return false; } @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(PercolatorPlugin.class, GeoPlugin.class); } public void testPercolatorQuery() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") ); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject() .field("id", "1") .field("query", matchAllQuery()).endObject()) .get(); client().prepareIndex("test").setId("2") .setSource(jsonBuilder().startObject() .field("id", "2") .field("query", matchQuery("field1", "value")).endObject()) .get(); client().prepareIndex("test").setId("3") .setSource(jsonBuilder().startObject() .field("id", "3") .field("query", boolQuery() .must(matchQuery("field1", "value")) .must(matchQuery("field2", "value")) ).endObject()).get(); client().admin().indices().prepareRefresh().get(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject().endObject()); logger.info("percolating empty doc"); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()); logger.info("percolating doc with 1 field"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); source = 
BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()); logger.info("percolating doc with 2 fields"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 3); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); logger.info("percolating doc with 2 fields"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", Arrays.asList( BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()) ), XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 3); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(1))); } public void testPercolatorRangeQueries() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("field1", 
"type=long", "field2", "type=double", "field3", "type=ip", "field4", "type=date", "query", "type=percolator") ); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(10).to(12)).endObject()) .get(); client().prepareIndex("test").setId("2") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(20).to(22)).endObject()) .get(); client().prepareIndex("test").setId("3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(rangeQuery("field1").from(10).to(12)) .must(rangeQuery("field1").from(12).to(14)) ).endObject()).get(); client().admin().indices().prepareRefresh().get(); client().prepareIndex("test").setId("4") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(10).to(12)).endObject()) .get(); client().prepareIndex("test").setId("5") .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(20).to(22)).endObject()) .get(); client().prepareIndex("test").setId("6") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(rangeQuery("field2").from(10).to(12)) .must(rangeQuery("field2").from(12).to(14)) ).endObject()).get(); client().admin().indices().prepareRefresh().get(); client().prepareIndex("test").setId("7") .setSource(jsonBuilder().startObject() .field("query", rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")) .endObject()) .get(); client().prepareIndex("test").setId("8") .setSource(jsonBuilder().startObject() .field("query", rangeQuery("field3").from("192.168.1.20").to("192.168.1.30")) .endObject()) .get(); client().prepareIndex("test").setId("9") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")) .must(rangeQuery("field3").from("192.168.1.5").to("192.168.1.10")) ).endObject()).get(); client().prepareIndex("test").setId("10") .setSource(jsonBuilder().startObject().field("query", boolQuery() 
.must(rangeQuery("field4").from("2010-01-01").to("2018-01-01")) .must(rangeQuery("field4").from("2010-01-01").to("now")) ).endObject()).get(); client().admin().indices().prepareRefresh().get(); // Test long range: BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 12).endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); logger.info("response={}", response); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 11).endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); // Test double range: source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 12).endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("6")); assertThat(response.getHits().getAt(1).getId(), equalTo("4")); source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 11).endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("4")); // Test IP range: source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.5").endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("9")); 
assertThat(response.getHits().getAt(1).getId(), equalTo("7")); source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.4").endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("7")); // Test date range: source = BytesReference.bytes(jsonBuilder().startObject().field("field4", "2016-05-15").endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("10")); } public void testPercolatorGeoQueries() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("id", "type=keyword", "field1", "type=geo_point", "field2", "type=geo_shape", "query", "type=percolator")); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject() .field("query", geoDistanceQuery("field1").point(52.18, 4.38).distance(50, DistanceUnit.KILOMETERS)) .field("id", "1") .endObject()).get(); client().prepareIndex("test").setId("2") .setSource(jsonBuilder().startObject() .field("query", geoBoundingBoxQuery("field1").setCorners(52.3, 4.4, 52.1, 4.6)) .field("id", "2") .endObject()).get(); client().prepareIndex("test").setId("3") .setSource(jsonBuilder().startObject() .field("query", geoPolygonQuery("field1", Arrays.asList(new GeoPoint(52.1, 4.4), new GeoPoint(52.3, 4.5), new GeoPoint(52.1, 4.6)))) .field("id", "3") .endObject()).get(); refresh(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .startObject("field1").field("lat", 52.20).field("lon", 4.51).endObject() .endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 3); 
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); } public void testPercolatorQueryExistingDocument() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") ); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject() .field("id", "1") .field("query", matchAllQuery()).endObject()) .get(); client().prepareIndex("test").setId("2") .setSource(jsonBuilder().startObject() .field("id", "2") .field("query", matchQuery("field1", "value")).endObject()) .get(); client().prepareIndex("test").setId("3") .setSource(jsonBuilder().startObject() .field("id", "3") .field("query", boolQuery() .must(matchQuery("field1", "value")) .must(matchQuery("field2", "value"))).endObject()).get(); client().prepareIndex("test").setId("4").setSource("{\"id\": \"4\"}", XContentType.JSON).get(); client().prepareIndex("test").setId("5").setSource(XContentType.JSON, "id", "5", "field1", "value").get(); client().prepareIndex("test").setId("6").setSource(XContentType.JSON, "id", "6", "field1", "value", "field2", "value").get(); client().admin().indices().prepareRefresh().get(); logger.info("percolating empty doc"); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", "test", "1", null, null, null)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); logger.info("percolating doc with 1 field"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); logger.info("percolating 
doc with 2 fields"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", "test", "6", null, null, null)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 3); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); } public void testPercolatorQueryExistingDocumentSourceDisabled() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("_source", "enabled=false", "field1", "type=keyword", "query", "type=percolator") ); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .get(); client().prepareIndex("test").setId("2").setSource("{}", XContentType.JSON).get(); client().admin().indices().prepareRefresh().get(); logger.info("percolating empty doc with source disabled"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", "test", "1", null, null, null)) .get(); }); assertThat(e.getMessage(), containsString("source disabled")); } public void testPercolatorSpecificQueries() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("id", "type=keyword", "field1", "type=text", "field2", "type=text", "query", "type=percolator") ); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject() .field("id", "1") .field("query", multiMatchQuery("quick brown fox", "field1", "field2") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)).endObject()) .get(); client().prepareIndex("test").setId("2") .setSource(jsonBuilder().startObject() .field("id", "2") .field("query", spanNearQuery(spanTermQuery("field1", "quick"), 0) .addClause(spanTermQuery("field1", "brown")) .addClause(spanTermQuery("field1", "fox")) .inOrder(true) ).endObject()) .get(); 
client().admin().indices().prepareRefresh().get(); client().prepareIndex("test").setId("3") .setSource(jsonBuilder().startObject() .field("id", "3") .field("query", spanNotQuery( spanNearQuery(spanTermQuery("field1", "quick"), 0) .addClause(spanTermQuery("field1", "brown")) .addClause(spanTermQuery("field1", "fox")) .inOrder(true), spanNearQuery(spanTermQuery("field1", "the"), 0) .addClause(spanTermQuery("field1", "lazy")) .addClause(spanTermQuery("field1", "dog")) .inOrder(true)).dist(2) ).endObject()) .get(); // doesn't match client().prepareIndex("test").setId("4") .setSource(jsonBuilder().startObject() .field("id", "4") .field("query", spanNotQuery( spanNearQuery(spanTermQuery("field1", "quick"), 0) .addClause(spanTermQuery("field1", "brown")) .addClause(spanTermQuery("field1", "fox")) .inOrder(true), spanNearQuery(spanTermQuery("field1", "the"), 0) .addClause(spanTermQuery("field1", "lazy")) .addClause(spanTermQuery("field1", "dog")) .inOrder(true)).dist(3) ).endObject()) .get(); client().admin().indices().prepareRefresh().get(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "the quick brown fox jumps over the lazy dog") .field("field2", "the quick brown fox falls down into the well") .endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 3); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getScore(), equalTo(Float.NaN)); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getScore(), equalTo(Float.NaN)); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getScore(), equalTo(Float.NaN)); } public void testPercolatorQueryWithHighlighting() throws Exception { StringBuilder fieldMapping = new StringBuilder("type=text") 
.append(",store=").append(randomBoolean()); if (randomBoolean()) { fieldMapping.append(",term_vector=with_positions_offsets"); } else if (randomBoolean()) { fieldMapping.append(",index_options=offsets"); } assertAcked(client().admin().indices().prepareCreate("test") .setMapping("id", "type=keyword", "field1", fieldMapping.toString(), "query", "type=percolator") ); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject() .field("id", "1") .field("query", matchQuery("field1", "brown fox")).endObject()) .execute().actionGet(); client().prepareIndex("test").setId("2") .setSource(jsonBuilder().startObject() .field("id", "2") .field("query", matchQuery("field1", "lazy dog")).endObject()) .execute().actionGet(); client().prepareIndex("test").setId("3") .setSource(jsonBuilder().startObject() .field("id", "3") .field("query", termQuery("field1", "jumps")).endObject()) .execute().actionGet(); client().prepareIndex("test").setId("4") .setSource(jsonBuilder().startObject() .field("id", "4") .field("query", termQuery("field1", "dog")).endObject()) .execute().actionGet(); client().prepareIndex("test").setId("5") .setSource(jsonBuilder().startObject() .field("id", "5") .field("query", termQuery("field1", "fox")).endObject()) .execute().actionGet(); client().admin().indices().prepareRefresh().get(); BytesReference document = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "The quick brown fox jumps over the lazy dog") .endObject()); SearchResponse searchResponse = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON)) .highlighter(new HighlightBuilder().field("field1")) .addSort("id", SortOrder.ASC) .get(); assertHitCount(searchResponse, 5); assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick <em>brown</em> <em>fox</em> jumps over the lazy dog")); 
assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the <em>lazy</em> <em>dog</em>")); assertThat(searchResponse.getHits().getAt(2).getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox <em>jumps</em> over the lazy dog")); assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy <em>dog</em>")); assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps over the lazy dog")); BytesReference document1 = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "The quick brown fox jumps") .endObject()); BytesReference document2 = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "over the lazy dog") .endObject()); searchResponse = client().prepareSearch() .setQuery(boolQuery() .should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1")) .should(new PercolateQueryBuilder("query", document2, XContentType.JSON).setName("query2")) ) .highlighter(new HighlightBuilder().field("field1")) .addSort("id", SortOrder.ASC) .get(); logger.info("searchResponse={}", searchResponse); assertHitCount(searchResponse, 5); assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("query1_field1").fragments()[0].string(), equalTo("The quick <em>brown</em> <em>fox</em> jumps")); assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("query2_field1").fragments()[0].string(), equalTo("over the <em>lazy</em> <em>dog</em>")); assertThat(searchResponse.getHits().getAt(2).getHighlightFields().get("query1_field1").fragments()[0].string(), equalTo("The quick brown fox <em>jumps</em>")); assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("query2_field1").fragments()[0].string(), 
equalTo("over the lazy <em>dog</em>")); assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("query1_field1").fragments()[0].string(), equalTo("The quick brown <em>fox</em> jumps")); searchResponse = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", Arrays.asList( BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) ), XContentType.JSON)) .highlighter(new HighlightBuilder().field("field1")) .addSort("id", SortOrder.ASC) .get(); assertHitCount(searchResponse, 5); assertThat(searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(1, 3))); assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("1_field1").fragments()[0].string(), equalTo("<em>fox</em>")); assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("3_field1").fragments()[0].string(), equalTo("<em>brown</em> <em>fox</em>")); assertThat(searchResponse.getHits().getAt(1).getFields().get("_percolator_document_slot").getValues(), equalTo(Collections.singletonList(0))); assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("0_field1").fragments()[0].string(), equalTo("<em>dog</em>")); assertThat(searchResponse.getHits().getAt(2).getFields().get("_percolator_document_slot").getValues(), equalTo(Collections.singletonList(2))); assertThat(searchResponse.getHits().getAt(2).getHighlightFields().get("2_field1").fragments()[0].string(), equalTo("<em>jumps</em>")); assertThat(searchResponse.getHits().getAt(3).getFields().get("_percolator_document_slot").getValues(), equalTo(Collections.singletonList(0))); 
assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("0_field1").fragments()[0].string(), equalTo("<em>dog</em>")); assertThat(searchResponse.getHits().getAt(4).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(1, 3))); assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("1_field1").fragments()[0].string(), equalTo("<em>fox</em>")); assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("3_field1").fragments()[0].string(), equalTo("brown <em>fox</em>")); searchResponse = client().prepareSearch() .setQuery(boolQuery() .should(new PercolateQueryBuilder("query", Arrays.asList( BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()) ), XContentType.JSON).setName("query1")) .should(new PercolateQueryBuilder("query", Arrays.asList( BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) ), XContentType.JSON).setName("query2")) ) .highlighter(new HighlightBuilder().field("field1")) .addSort("id", SortOrder.ASC) .get(); logger.info("searchResponse={}", searchResponse); assertHitCount(searchResponse, 5); assertThat(searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot_query1").getValues(), equalTo(Collections.singletonList(1))); assertThat(searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot_query2").getValues(), equalTo(Collections.singletonList(1))); assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("query1_1_field1").fragments()[0].string(), equalTo("<em>fox</em>")); assertThat(searchResponse.getHits().getAt(0).getHighlightFields().get("query2_1_field1").fragments()[0].string(), equalTo("<em>brown</em> <em>fox</em>")); 
assertThat(searchResponse.getHits().getAt(1).getFields().get("_percolator_document_slot_query1").getValues(), equalTo(Collections.singletonList(0))); assertThat(searchResponse.getHits().getAt(1).getHighlightFields().get("query1_0_field1").fragments()[0].string(), equalTo("<em>dog</em>")); assertThat(searchResponse.getHits().getAt(2).getFields().get("_percolator_document_slot_query2").getValues(), equalTo(Collections.singletonList(0))); assertThat(searchResponse.getHits().getAt(2).getHighlightFields().get("query2_0_field1").fragments()[0].string(), equalTo("<em>jumps</em>")); assertThat(searchResponse.getHits().getAt(3).getFields().get("_percolator_document_slot_query1").getValues(), equalTo(Collections.singletonList(0))); assertThat(searchResponse.getHits().getAt(3).getHighlightFields().get("query1_0_field1").fragments()[0].string(), equalTo("<em>dog</em>")); assertThat(searchResponse.getHits().getAt(4).getFields().get("_percolator_document_slot_query1").getValues(), equalTo(Collections.singletonList(1))); assertThat(searchResponse.getHits().getAt(4).getFields().get("_percolator_document_slot_query2").getValues(), equalTo(Collections.singletonList(1))); assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("query1_1_field1").fragments()[0].string(), equalTo("<em>fox</em>")); assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("query2_1_field1").fragments()[0].string(), equalTo("brown <em>fox</em>")); } public void testTakePositionOffsetGapIntoAccount() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("field", "type=text,position_increment_gap=5", "query", "type=percolator") ); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown fox").slop(4)).endObject()) .get(); client().prepareIndex("test").setId("2") .setSource(jsonBuilder().startObject().field("query", new MatchPhraseQueryBuilder("field", "brown 
fox").slop(5)).endObject()) .get(); client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch().setQuery( new PercolateQueryBuilder("query", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}"), XContentType.JSON) ).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } public void testManyPercolatorFields() throws Exception { String queryFieldName = randomAlphaOfLength(8); assertAcked(client().admin().indices().prepareCreate("test1") .setMapping(queryFieldName, "type=percolator", "field", "type=keyword") ); assertAcked(client().admin().indices().prepareCreate("test2") .setMapping(queryFieldName, "type=percolator", "second_query_field", "type=percolator", "field", "type=keyword") ); assertAcked(client().admin().indices().prepareCreate("test3") .setMapping(jsonBuilder().startObject().startObject("_doc").startObject("properties") .startObject("field") .field("type", "keyword") .endObject() .startObject("object_field") .field("type", "object") .startObject("properties") .startObject(queryFieldName) .field("type", "percolator") .endObject() .endObject() .endObject() .endObject() .endObject().endObject()) ); } public void testWithMultiplePercolatorFields() throws Exception { String queryFieldName = randomAlphaOfLength(8); assertAcked(client().admin().indices().prepareCreate("test1") .setMapping(queryFieldName, "type=percolator", "field", "type=keyword")); assertAcked(client().admin().indices().prepareCreate("test2") .setMapping(jsonBuilder().startObject().startObject("_doc").startObject("properties") .startObject("field") .field("type", "keyword") .endObject() .startObject("object_field") .field("type", "object") .startObject("properties") .startObject(queryFieldName) .field("type", "percolator") .endObject() .endObject() .endObject() .endObject() .endObject().endObject()) ); // Acceptable: client().prepareIndex("test1").setId("1") 
.setSource(jsonBuilder().startObject().field(queryFieldName, matchQuery("field", "value")).endObject()) .get(); client().prepareIndex("test2").setId("1") .setSource(jsonBuilder().startObject().startObject("object_field") .field(queryFieldName, matchQuery("field", "value")) .endObject().endObject()) .get(); client().admin().indices().prepareRefresh().get(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field", "value").endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON)) .setIndices("test1") .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getIndex(), equalTo("test1")); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("object_field." + queryFieldName, source, XContentType.JSON)) .setIndices("test2") .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getIndex(), equalTo("test2")); // Unacceptable: MapperParsingException e = expectThrows(MapperParsingException.class, () -> { client().prepareIndex("test2").setId("1") .setSource(jsonBuilder().startObject().startArray("object_field") .startObject().field(queryFieldName, matchQuery("field", "value")).endObject() .startObject().field(queryFieldName, matchQuery("field", "value")).endObject() .endArray().endObject()) .get(); }); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(e.getCause().getMessage(), equalTo("a document can only contain one percolator query")); } public void testPercolateQueryWithNestedDocuments() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder(); mapping.startObject().startObject("properties").startObject("query").field("type", "percolator").endObject() .startObject("id").field("type", "keyword").endObject() 
.startObject("companyname").field("type", "text").endObject().startObject("employee").field("type", "nested") .startObject("properties").startObject("name").field("type", "text").endObject().endObject().endObject().endObject() .endObject(); assertAcked(client().admin().indices().prepareCreate("test") .setMapping(mapping) ); client().prepareIndex("test").setId("q1").setSource(jsonBuilder().startObject() .field("id", "q1") .field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND), ScoreMode.Avg) ).endObject()) .get(); // this query should never match as it doesn't use nested query: client().prepareIndex("test").setId("q2").setSource(jsonBuilder().startObject() .field("id", "q2") .field("query", QueryBuilders.matchQuery("employee.name", "virginia")).endObject()) .get(); client().admin().indices().prepareRefresh().get(); client().prepareIndex("test").setId("q3").setSource(jsonBuilder().startObject() .field("id", "q3") .field("query", QueryBuilders.matchAllQuery()).endObject()) .get(); client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "virginia potts").endObject() .startObject().field("name", "tony stark").endObject() .endArray() .endObject()), XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("q1")); assertThat(response.getHits().getAt(1).getId(), equalTo("q3")); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "notstark") .startArray("employee") .startObject().field("name", "virginia stark").endObject() .startObject().field("name", "tony 
stark").endObject() .endArray() .endObject()), XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject()), XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", Arrays.asList( BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "virginia potts").endObject() .startObject().field("name", "tony stark").endObject() .endArray() .endObject()), BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "peter parker").endObject() .startObject().field("name", "virginia potts").endObject() .endArray() .endObject()), BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "peter parker").endObject() .endArray() .endObject()) ), XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("q1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); assertThat(response.getHits().getAt(1).getId(), equalTo("q3")); assertThat(response.getHits().getAt(1).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1, 2))); } public void testPercolatorQueryViaMultiSearch() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("field1", 
"type=text", "query", "type=percolator") ); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject()) .execute().actionGet(); client().prepareIndex("test").setId("2") .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject()) .execute().actionGet(); client().prepareIndex("test").setId("3") .setSource(jsonBuilder().startObject().field("query", boolQuery() .must(matchQuery("field1", "b")) .must(matchQuery("field1", "c")) ).endObject()) .execute().actionGet(); client().prepareIndex("test").setId("4") .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) .execute().actionGet(); client().prepareIndex("test").setId("5") .setSource(jsonBuilder().startObject().field("field1", "c").endObject()) .execute().actionGet(); client().admin().indices().prepareRefresh().get(); MultiSearchResponse response = client().prepareMultiSearch() .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), XContentType.JSON))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()), XContentType.YAML))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(jsonBuilder().startObject().field("field1", "b c").endObject()), XContentType.JSON))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()), XContentType.JSON))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null))) .add(client().prepareSearch("test") // non existing doc, so error element .setQuery(new PercolateQueryBuilder("query", "test", "6", null, null, 
null))) .get(); MultiSearchResponse.Item item = response.getResponses()[0]; assertHitCount(item.getResponse(), 2L); assertSearchHits(item.getResponse(), "1", "4"); assertThat(item.getFailureMessage(), nullValue()); item = response.getResponses()[1]; assertHitCount(item.getResponse(), 2L); assertSearchHits(item.getResponse(), "2", "4"); assertThat(item.getFailureMessage(), nullValue()); item = response.getResponses()[2]; assertHitCount(item.getResponse(), 4L); assertSearchHits(item.getResponse(), "1", "2", "3", "4"); assertThat(item.getFailureMessage(), nullValue()); item = response.getResponses()[3]; assertHitCount(item.getResponse(), 1L); assertSearchHits(item.getResponse(), "4"); assertThat(item.getFailureMessage(), nullValue()); item = response.getResponses()[4]; assertHitCount(item.getResponse(), 2L); assertSearchHits(item.getResponse(), "2", "4"); assertThat(item.getFailureMessage(), nullValue()); item = response.getResponses()[5]; assertThat(item.getResponse(), nullValue()); assertThat(item.getFailureMessage(), notNullValue()); assertThat(item.getFailureMessage(), containsString("[test/6] couldn't be found")); } public void testDisallowExpensiveQueries() throws IOException { try { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("id", "type=keyword", "field1", "type=keyword", "query", "type=percolator") ); client().prepareIndex("test").setId("1") .setSource(jsonBuilder().startObject() .field("id", "1") .field("query", matchQuery("field1", "value")).endObject()) .get(); refresh(); // Execute with search.allow_expensive_queries = null => default value = false => success BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); 
assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); // Set search.allow_expensive_queries to "false" => assert failure ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", false)); assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get()); assertEquals("[percolate] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", e.getCause().getMessage()); // Set search.allow_expensive_queries setting to "true" ==> success updateSettingsRequest = new ClusterUpdateSettingsRequest(); updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true)); assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); } finally { ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); } } public void testWrappedWithConstantScore() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setMapping("d", "type=date", "q", "type=percolator") ); client().prepareIndex("test").setId("1") 
.setSource(jsonBuilder().startObject().field("q", boolQuery().must(rangeQuery("d").gt("now")) ).endObject()) .execute().actionGet(); client().prepareIndex("test").setId("2") .setSource(jsonBuilder().startObject().field("q", boolQuery().must(rangeQuery("d").lt("now")) ).endObject()) .execute().actionGet(); client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch("test").setQuery(new PercolateQueryBuilder("q", BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), XContentType.JSON)).get(); assertEquals(1, response.getHits().getTotalHits().value); response = client().prepareSearch("test").setQuery(new PercolateQueryBuilder("q", BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), XContentType.JSON)).addSort("_doc", SortOrder.ASC).get(); assertEquals(1, response.getHits().getTotalHits().value); response = client().prepareSearch("test").setQuery(constantScoreQuery(new PercolateQueryBuilder("q", BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), XContentType.JSON))).get(); assertEquals(1, response.getHits().getTotalHits().value); } }
apache-2.0
nmoghadam/jbpm
jbpm-audit/src/main/java/org/jbpm/process/audit/query/NodeInstLogQueryBuilderImpl.java
2928
/* * Copyright 2015 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.process.audit.query; import static org.kie.internal.query.QueryParameterIdentifiers.NODE_ID_LIST; import static org.kie.internal.query.QueryParameterIdentifiers.NODE_INSTANCE_ID_LIST; import static org.kie.internal.query.QueryParameterIdentifiers.NODE_NAME_LIST; import static org.kie.internal.query.QueryParameterIdentifiers.TYPE_LIST; import static org.kie.internal.query.QueryParameterIdentifiers.WORK_ITEM_ID_LIST; import java.util.List; import org.jbpm.process.audit.JPAAuditLogService; import org.jbpm.query.jpa.data.QueryWhere; import org.kie.api.runtime.CommandExecutor; import org.kie.api.runtime.manager.audit.NodeInstanceLog; import org.kie.internal.query.ParametrizedQuery; import org.kie.internal.runtime.manager.audit.query.NodeInstanceLogQueryBuilder; public class NodeInstLogQueryBuilderImpl extends AbstractAuditQueryBuilderImpl<NodeInstanceLogQueryBuilder, NodeInstanceLog> implements NodeInstanceLogQueryBuilder { public NodeInstLogQueryBuilderImpl(CommandExecutor cmdService) { super(cmdService); } public NodeInstLogQueryBuilderImpl(JPAAuditLogService jpaAuditService) { super(jpaAuditService); } @Override public NodeInstanceLogQueryBuilder nodeInstanceId( String... nodeInstanceId ) { addObjectParameter(NODE_INSTANCE_ID_LIST, "node instance id", nodeInstanceId); return this; } @Override public NodeInstanceLogQueryBuilder nodeId( String... 
nodeId ) { addObjectParameter(NODE_ID_LIST, "node id", nodeId); return this; } @Override public NodeInstanceLogQueryBuilder nodeName( String... name ) { addObjectParameter(NODE_NAME_LIST, "node name", name); return this; } @Override public NodeInstanceLogQueryBuilder nodeType( String... type ) { addObjectParameter(TYPE_LIST, "node type", type); return this; } @Override public NodeInstanceLogQueryBuilder workItemId( long... workItemId ) { addLongParameter(WORK_ITEM_ID_LIST, "work item id", workItemId); return this; } @Override protected Class<NodeInstanceLog> getResultType() { return NodeInstanceLog.class; } @Override protected Class<org.jbpm.process.audit.NodeInstanceLog> getQueryType() { return org.jbpm.process.audit.NodeInstanceLog.class; } }
apache-2.0
DMHP/jaggery
components/jaggery-core/org.jaggeryjs.jaggery.core/src/main/java/org/jaggeryjs/jaggery/core/websocket/JaggeryWebSocketServlet.java
1980
package org.jaggeryjs.jaggery.core.websocket; import org.apache.catalina.websocket.StreamInbound; import org.apache.catalina.websocket.WebSocketServlet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jaggeryjs.hostobjects.web.WebSocketHostObject; import org.jaggeryjs.jaggery.core.manager.CommonManager; import org.jaggeryjs.scriptengine.engine.JaggeryContext; import org.jaggeryjs.jaggery.core.manager.WebAppManager; import org.jaggeryjs.scriptengine.engine.RhinoEngine; import org.jaggeryjs.scriptengine.exceptions.ScriptException; import org.mozilla.javascript.Scriptable; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; public class JaggeryWebSocketServlet extends WebSocketServlet { private static final Log log = LogFactory.getLog(JaggeryWebSocketServlet.class); private WSMessageInBound wsMessageInBound = null; public void init(ServletConfig config) throws ServletException { super.init(config); } protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { CommonManager.getInstance().getEngine().enterContext(); } catch (ScriptException e) { log.error(e.getMessage(), e); throw new ServletException(e); } WebAppManager.execute(request, response); JaggeryContext context = CommonManager.getJaggeryContext(); Scriptable scope = context.getScope(); wsMessageInBound = new WSMessageInBound((WebSocketHostObject) scope.get("webSocket", scope)); RhinoEngine.exitContext(); super.doGet(request, response); } @Override protected StreamInbound createWebSocketInbound(String s, HttpServletRequest request) { return wsMessageInBound; } }
apache-2.0
ibinti/intellij-community
platform/funcTests/project1/module1/src/com/intellij/testProject/idea/ClassWithManyImports.java
2191
package com.intellij.testProject.idea; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; import java.*; import java.util.*; 
import java.applet.*; import java.awt.*; import java.beans.*; import java.io.*; import java.nio.*; import java.rmi.*; import java.sql.*; import java.text.*; public class ClassWithManyImports { }
apache-2.0
xiaofu/apache-hbase-0.94.10-read
src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
17120
/** * Copyright 2010 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.coprocessor; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.RegionScanner; import org.apache.hadoop.hbase.regionserver.SplitTransaction; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.PairOfSameType; import org.junit.experimental.categories.Category; import org.mockito.Mockito; import static org.mockito.Mockito.when; @Category(SmallTests.class) public class TestCoprocessorInterface 
extends HBaseTestCase { static final Log LOG = LogFactory.getLog(TestCoprocessorInterface.class); static final String DIR = "test/build/data/TestCoprocessorInterface/"; private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static class CustomScanner implements RegionScanner { private RegionScanner delegate; public CustomScanner(RegionScanner delegate) { this.delegate = delegate; } @Override public boolean next(List<KeyValue> results) throws IOException { return delegate.next(results); } @Override public boolean next(List<KeyValue> results, String metric) throws IOException { return delegate.next(results, metric); } @Override public boolean next(List<KeyValue> result, int limit) throws IOException { return delegate.next(result, limit); } @Override public boolean next(List<KeyValue> result, int limit, String metric) throws IOException { return delegate.next(result, limit, metric); } @Override public boolean nextRaw(List<KeyValue> result, int limit, String metric) throws IOException { return delegate.nextRaw(result, limit, metric); } @Override public boolean nextRaw(List<KeyValue> result, String metric) throws IOException { return delegate.nextRaw(result, metric); } @Override public void close() throws IOException { delegate.close(); } @Override public HRegionInfo getRegionInfo() { return delegate.getRegionInfo(); } @Override public boolean isFilterDone() { return delegate.isFilterDone(); } @Override public boolean reseek(byte[] row) throws IOException { return false; } @Override public long getMvccReadPoint() { return delegate.getMvccReadPoint(); } } public static class CoprocessorImpl extends BaseRegionObserver { private boolean startCalled; private boolean stopCalled; private boolean preOpenCalled; private boolean postOpenCalled; private boolean preCloseCalled; private boolean postCloseCalled; private boolean preCompactCalled; private boolean postCompactCalled; private boolean preFlushCalled; private boolean postFlushCalled; 
private boolean preSplitCalled; private boolean postSplitCalled; private ConcurrentMap<String, Object> sharedData; @Override public void start(CoprocessorEnvironment e) { sharedData = ((RegionCoprocessorEnvironment)e).getSharedData(); // using new String here, so that there will be new object on each invocation sharedData.putIfAbsent("test1", new Object()); startCalled = true; } @Override public void stop(CoprocessorEnvironment e) { sharedData = null; stopCalled = true; } @Override public void preOpen(ObserverContext<RegionCoprocessorEnvironment> e) { preOpenCalled = true; } @Override public void postOpen(ObserverContext<RegionCoprocessorEnvironment> e) { postOpenCalled = true; } @Override public void preClose(ObserverContext<RegionCoprocessorEnvironment> e, boolean abortRequested) { preCloseCalled = true; } @Override public void postClose(ObserverContext<RegionCoprocessorEnvironment> e, boolean abortRequested) { postCloseCalled = true; } @Override public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e, Store store, InternalScanner scanner) { preCompactCalled = true; return scanner; } @Override public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e, Store store, StoreFile resultFile) { postCompactCalled = true; } @Override public void preFlush(ObserverContext<RegionCoprocessorEnvironment> e) { preFlushCalled = true; } @Override public void postFlush(ObserverContext<RegionCoprocessorEnvironment> e) { postFlushCalled = true; } @Override public void preSplit(ObserverContext<RegionCoprocessorEnvironment> e) { preSplitCalled = true; } @Override public void postSplit(ObserverContext<RegionCoprocessorEnvironment> e, HRegion l, HRegion r) { postSplitCalled = true; } @Override public RegionScanner postScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e, final Scan scan, final RegionScanner s) throws IOException { return new CustomScanner(s); } boolean wasStarted() { return startCalled; } boolean wasStopped() { return 
stopCalled; } boolean wasOpened() { return (preOpenCalled && postOpenCalled); } boolean wasClosed() { return (preCloseCalled && postCloseCalled); } boolean wasFlushed() { return (preFlushCalled && postFlushCalled); } boolean wasCompacted() { return (preCompactCalled && postCompactCalled); } boolean wasSplit() { return (preSplitCalled && postSplitCalled); } Map<String, Object> getSharedData() { return sharedData; } } public static class CoprocessorII extends BaseRegionObserver { private ConcurrentMap<String, Object> sharedData; @Override public void start(CoprocessorEnvironment e) { sharedData = ((RegionCoprocessorEnvironment)e).getSharedData(); sharedData.putIfAbsent("test2", new Object()); } @Override public void stop(CoprocessorEnvironment e) { sharedData = null; } @Override public void preGet(final ObserverContext<RegionCoprocessorEnvironment> e, final Get get, final List<KeyValue> results) throws IOException { if (1/0 == 1) { e.complete(); } } Map<String, Object> getSharedData() { return sharedData; } } public void testSharedData() throws IOException { byte [] tableName = Bytes.toBytes("testtable"); byte [][] families = { fam1, fam2, fam3 }; Configuration hc = initSplit(); HRegion region = initHRegion(tableName, getName(), hc, new Class<?>[]{}, families); for (int i = 0; i < 3; i++) { addContent(region, fam3); region.flushcache(); } region.compactStores(); byte [] splitRow = region.checkSplit(); assertNotNull(splitRow); HRegion [] regions = split(region, splitRow); for (int i = 0; i < regions.length; i++) { regions[i] = reopenRegion(regions[i], CoprocessorImpl.class, CoprocessorII.class); } Coprocessor c = regions[0].getCoprocessorHost(). findCoprocessor(CoprocessorImpl.class.getName()); Coprocessor c2 = regions[0].getCoprocessorHost(). 
findCoprocessor(CoprocessorII.class.getName()); Object o = ((CoprocessorImpl)c).getSharedData().get("test1"); Object o2 = ((CoprocessorII)c2).getSharedData().get("test2"); assertNotNull(o); assertNotNull(o2); // to coprocessors get different sharedDatas assertFalse(((CoprocessorImpl)c).getSharedData() == ((CoprocessorII)c2).getSharedData()); for (int i = 1; i < regions.length; i++) { c = regions[i].getCoprocessorHost(). findCoprocessor(CoprocessorImpl.class.getName()); c2 = regions[i].getCoprocessorHost(). findCoprocessor(CoprocessorII.class.getName()); // make sure that all coprocessor of a class have identical sharedDatas assertTrue(((CoprocessorImpl)c).getSharedData().get("test1") == o); assertTrue(((CoprocessorII)c2).getSharedData().get("test2") == o2); } // now have all Environments fail for (int i = 0; i < regions.length; i++) { try { Get g = new Get(regions[i].getStartKey()); regions[i].get(g, null); fail(); } catch (DoNotRetryIOException xc) { } assertNull(regions[i].getCoprocessorHost(). findCoprocessor(CoprocessorII.class.getName())); } c = regions[0].getCoprocessorHost(). findCoprocessor(CoprocessorImpl.class.getName()); assertTrue(((CoprocessorImpl)c).getSharedData().get("test1") == o); c = c2 = null; // perform a GC System.gc(); // reopen the region region = reopenRegion(regions[0], CoprocessorImpl.class, CoprocessorII.class); c = region.getCoprocessorHost(). findCoprocessor(CoprocessorImpl.class.getName()); // CPimpl is unaffected, still the same reference assertTrue(((CoprocessorImpl)c).getSharedData().get("test1") == o); c2 = region.getCoprocessorHost(). 
findCoprocessor(CoprocessorII.class.getName()); // new map and object created, hence the reference is different // hence the old entry was indeed removed by the GC and new one has been created assertFalse(((CoprocessorII)c2).getSharedData().get("test2") == o2); } public void testCoprocessorInterface() throws IOException { byte [] tableName = Bytes.toBytes("testtable"); byte [][] families = { fam1, fam2, fam3 }; Configuration hc = initSplit(); HRegion region = initHRegion(tableName, getName(), hc, new Class<?>[]{CoprocessorImpl.class}, families); for (int i = 0; i < 3; i++) { addContent(region, fam3); region.flushcache(); } region.compactStores(); byte [] splitRow = region.checkSplit(); assertNotNull(splitRow); HRegion [] regions = split(region, splitRow); for (int i = 0; i < regions.length; i++) { regions[i] = reopenRegion(regions[i], CoprocessorImpl.class); } region.close(); region.getLog().closeAndDelete(); Coprocessor c = region.getCoprocessorHost(). findCoprocessor(CoprocessorImpl.class.getName()); // HBASE-4197 Scan s = new Scan(); RegionScanner scanner = regions[0].getCoprocessorHost().postScannerOpen(s, regions[0].getScanner(s)); assertTrue(scanner instanceof CustomScanner); // this would throw an exception before HBASE-4197 scanner.next(new ArrayList<KeyValue>()); assertTrue("Coprocessor not started", ((CoprocessorImpl)c).wasStarted()); assertTrue("Coprocessor not stopped", ((CoprocessorImpl)c).wasStopped()); assertTrue(((CoprocessorImpl)c).wasOpened()); assertTrue(((CoprocessorImpl)c).wasClosed()); assertTrue(((CoprocessorImpl)c).wasFlushed()); assertTrue(((CoprocessorImpl)c).wasCompacted()); assertTrue(((CoprocessorImpl)c).wasSplit()); for (int i = 0; i < regions.length; i++) { regions[i].close(); regions[i].getLog().closeAndDelete(); c = region.getCoprocessorHost() .findCoprocessor(CoprocessorImpl.class.getName()); assertTrue("Coprocessor not started", ((CoprocessorImpl)c).wasStarted()); assertTrue("Coprocessor not stopped", 
((CoprocessorImpl)c).wasStopped()); assertTrue(((CoprocessorImpl)c).wasOpened()); assertTrue(((CoprocessorImpl)c).wasClosed()); assertTrue(((CoprocessorImpl)c).wasCompacted()); } } HRegion reopenRegion(final HRegion closedRegion, Class<?> ... implClasses) throws IOException { //HRegionInfo info = new HRegionInfo(tableName, null, null, false); HRegion r = new HRegion(closedRegion); r.initialize(); // this following piece is a hack. currently a coprocessorHost // is secretly loaded at OpenRegionHandler. we don't really // start a region server here, so just manually create cphost // and set it to region. RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf); r.setCoprocessorHost(host); for (Class<?> implClass : implClasses) { host.load(implClass, Coprocessor.PRIORITY_USER, conf); } // we need to manually call pre- and postOpen here since the // above load() is not the real case for CP loading. A CP is // expected to be loaded by default from 1) configuration; or 2) // HTableDescriptor. If it's loaded after HRegion initialized, // the pre- and postOpen() won't be triggered automatically. // Here we have to call pre and postOpen explicitly. host.preOpen(); host.postOpen(); return r; } HRegion initHRegion (byte [] tableName, String callingMethod, Configuration conf, Class<?> [] implClasses, byte [][] families) throws IOException { HTableDescriptor htd = new HTableDescriptor(tableName); for(byte [] family : families) { htd.addFamily(new HColumnDescriptor(family)); } HRegionInfo info = new HRegionInfo(tableName, null, null, false); Path path = new Path(DIR + callingMethod); HRegion r = HRegion.createHRegion(info, path, conf, htd); // this following piece is a hack. 
RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf); r.setCoprocessorHost(host); for (Class<?> implClass : implClasses) { host.load(implClass, Coprocessor.PRIORITY_USER, conf); Coprocessor c = host.findCoprocessor(implClass.getName()); assertNotNull(c); } // Here we have to call pre and postOpen explicitly. host.preOpen(); host.postOpen(); return r; } Configuration initSplit() { // Always compact if there is more than one store file. TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 2); // Make lease timeout longer, lease checks less frequent TEST_UTIL.getConfiguration().setInt( "hbase.master.lease.thread.wakefrequency", 5 * 1000); TEST_UTIL.getConfiguration().setInt( "hbase.regionserver.lease.period", 10 * 1000); // Increase the amount of time between client retries TEST_UTIL.getConfiguration().setLong("hbase.client.pause", 15 * 1000); // This size should make it so we always split using the addContent // below. After adding all data, the first region is 1.3M TEST_UTIL.getConfiguration().setLong(HConstants.HREGION_MAX_FILESIZE, 1024 * 128); TEST_UTIL.getConfiguration().setBoolean("hbase.testing.nocluster", true); return TEST_UTIL.getConfiguration(); } private HRegion [] split(final HRegion r, final byte [] splitRow) throws IOException { HRegion[] regions = new HRegion[2]; SplitTransaction st = new SplitTransaction(r, splitRow); int i = 0; if (!st.prepare()) { // test fails. 
assertTrue(false); } try { Server mockServer = Mockito.mock(Server.class); when(mockServer.getConfiguration()).thenReturn( TEST_UTIL.getConfiguration()); PairOfSameType<HRegion> daughters = st.execute(mockServer, null); for (HRegion each_daughter: daughters) { regions[i] = each_daughter; i++; } } catch (IOException ioe) { LOG.info("Split transaction of " + r.getRegionNameAsString() + " failed:" + ioe.getMessage()); assertTrue(false); } catch (RuntimeException e) { LOG.info("Failed rollback of failed split of " + r.getRegionNameAsString() + e.getMessage()); } assertTrue(i == 2); return regions; } @org.junit.Rule public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = new org.apache.hadoop.hbase.ResourceCheckerJUnitRule(); }
apache-2.0
kidaa/incubator-ignite
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/near/GridCachePartitionedGetAndTransformStoreSelfTest.java
1311
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.near; import org.apache.ignite.cache.*; import org.apache.ignite.internal.processors.cache.*; import static org.apache.ignite.cache.CacheMode.*; /** * Test get and transform for store with partitioned cache. */ public class GridCachePartitionedGetAndTransformStoreSelfTest extends GridCacheGetAndTransformStoreAbstractTest { /** {@inheritDoc} */ @Override protected CacheMode cacheMode() { return PARTITIONED; } }
apache-2.0
jack-moseley/gobblin
gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/TestImmutableFSJobCatalog.java
3054
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.runtime.job_catalog; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.testng.Assert; import org.testng.Assert.ThrowingRunnable; import org.testng.annotations.Test; import com.google.common.collect.ImmutableMap; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import org.apache.gobblin.configuration.ConfigurationKeys; /** * Unit tests for {@link ImmutableFSJobCatalog} */ public class TestImmutableFSJobCatalog { @Test public void testConfigAccessor() throws Exception { Config sysConfig1 = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder() .put(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, "/tmp") .build()); ImmutableFSJobCatalog.ConfigAccessor cfgAccessor1 = new ImmutableFSJobCatalog.ConfigAccessor(sysConfig1); Assert.assertEquals(cfgAccessor1.getJobConfDir(), "/tmp"); Assert.assertEquals(cfgAccessor1.getJobConfDirPath(), new Path("/tmp")); Assert.assertEquals(cfgAccessor1.getJobConfDirFileSystem().getClass(), FileSystem.get(new Configuration()).getClass()); 
Assert.assertEquals(cfgAccessor1.getPollingInterval(), ConfigurationKeys.DEFAULT_JOB_CONFIG_FILE_MONITOR_POLLING_INTERVAL); Config sysConfig2 = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder() .put(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY, "/tmp2") .put(ConfigurationKeys.JOB_CONFIG_FILE_MONITOR_POLLING_INTERVAL_KEY, 100) .build()); ImmutableFSJobCatalog.ConfigAccessor cfgAccessor2 = new ImmutableFSJobCatalog.ConfigAccessor(sysConfig2); Assert.assertEquals(cfgAccessor2.getJobConfDir(), "file:///tmp2"); Assert.assertEquals(cfgAccessor2.getJobConfDirPath(), new Path("file:///tmp2")); Assert.assertTrue(cfgAccessor2.getJobConfDirFileSystem() instanceof LocalFileSystem); Assert.assertEquals(cfgAccessor2.getPollingInterval(), 100); Assert.assertThrows(new ThrowingRunnable() { @Override public void run() throws Throwable { new ImmutableFSJobCatalog.ConfigAccessor(ConfigFactory.empty()); } }); } }
apache-2.0
jstrachan/quickstarts
sandbox/apps/infinispan-server/src/test/java/io/fabric8/apps/infinispan/InfinispanServerKubernetesTest.java
6787
/* * Copyright 2005-2015 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.apps.infinispan; import io.fabric8.arquillian.kubernetes.Session; import io.fabric8.kubernetes.api.model.ReplicationController; import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.ServiceList; import io.fabric8.kubernetes.api.model.ServicePort; import io.fabric8.kubernetes.api.model.ServiceSpec; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.utils.Asserts; import io.fabric8.utils.Block; import org.apache.http.client.fluent.Request; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.message.BasicHeader; import org.assertj.core.api.Condition; import org.assertj.core.util.Preconditions; import org.jboss.arquillian.junit.Arquillian; import org.jboss.arquillian.test.api.ArquillianResource; import org.jgroups.Address; import org.jgroups.JChannel; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import java.io.IOException; import java.util.List; import static io.fabric8.kubernetes.api.KubernetesHelper.getName; import static io.fabric8.kubernetes.assertions.Assertions.assertThat; import static org.junit.Assert.assertTrue; @RunWith(Arquillian.class) public class InfinispanServerKubernetesTest { 
@ArquillianResource KubernetesClient client; @ArquillianResource Session session; @ArquillianResource ServiceList serviceList; @Test public void testInfinispan() throws Exception { assertThat(client).replicationControllers().haveAtLeast(1, new Condition<ReplicationController>() { @Override public boolean matches(ReplicationController replicationController) { return getName(replicationController).startsWith("infinispan"); } }); assertThat(client).services().haveAtLeast(1, new Condition<Service>() { @Override public boolean matches(Service serviceSchema) { return getName(serviceSchema).startsWith("infinispan-rest"); } }); assertThat(client).services().haveAtLeast(1, new Condition<Service>() { @Override public boolean matches(Service serviceSchema) { return getName(serviceSchema).startsWith("infinispan-remote"); } }); assertThat(client).services().haveAtLeast(1, new Condition<Service>() { @Override public boolean matches(Service serviceSchema) { return getName(serviceSchema).startsWith("infinispan-memcached"); } }); assertThat(client).services().haveAtLeast(1, new Condition<Service>() { @Override public boolean matches(Service serviceSchema) { return getName(serviceSchema).startsWith("infinispan-hotrod"); } }); } @Test public void testRestEndpoint() throws Exception { final String key = "key"; final String expectedValue = "value1"; final ServiceSpec restService = getRequiredServiceSpec("infinispan-rest"); List<ServicePort> ports = restService.getPorts(); assertTrue("Should have at least one port for service " + restService, ports.size() > 0); ServicePort firstServicePort = ports.get(0); final String serverURL = "http://" + restService.getClusterIP() + ":" + firstServicePort.getPort() + "/rest/default"; //TODO: We need to find a more elegant/robust way to know when the service is actually ready. 
Asserts.assertWaitFor(2 * 60 * 1000, new Block() { @Override public void invoke() throws Exception { String actualValue = null; try { putMethod(serverURL, key, expectedValue); actualValue = getMethod(serverURL, key); } catch (Exception e) { //ignore } finally { Assert.assertEquals(expectedValue, actualValue); } } }); } @Test public void testCluster() throws Exception { final JChannel channel = new JChannel(getClass().getResource("/jgroups.xml")); channel.connect("infinispan"); Asserts.assertWaitFor(2 * 60 * 1000, new Block() { @Override public void invoke() throws Exception { for (Address address : channel.getView().getMembers()) { System.out.println(address); } assertTrue(channel.getView().getMembers().size() > 3); } }); } /** * Get the {@link io.fabric8.kubernetes.api.model.Service} with the specified id. * @param id The id. * @return */ private ServiceSpec getRequiredServiceSpec(String id) { Preconditions.checkNotNullOrEmpty(id); for (Service s : serviceList.getItems()) { if (id.equals(getName(s))) { ServiceSpec spec = s.getSpec(); if (spec != null) { return spec; } } } throw new IllegalStateException("Service with id:"+id+" doesn't exists."); } /** * Method that puts a String value in cache. */ public static void putMethod(String serverURL, String key, String value) throws IOException { CloseableHttpClient client = HttpClients.createDefault(); HttpPut put = new HttpPut(serverURL + "/"+ key); put.addHeader(new BasicHeader("Content-Type", "text/plain")); put.setEntity(new StringEntity(value)); client.execute(put); } /** * Method that gets a value by a key in url as param value. */ private static String getMethod(String serverURL, String key) throws IOException { return Request.Get(serverURL + "/" + key) .addHeader("Content-Type", "text/plain") .execute() .returnContent().asString(); } }
apache-2.0
ariatemplates/ariatemplates
test/aria/jsunit/mock/RedirectToFile.js
920
/* * Copyright 2012 Amadeus s.a.s. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * This mock Msg Handler is used by test cases in test.aria.jsunit */ Aria.classDefinition({ $classpath : "test.aria.jsunit.mock.RedirectToFile", $extends : "aria.core.IOFilter", $prototype : { onRequest : function (req) { this.redirectToFile(req, req.url + ".json"); } } });
apache-2.0
SiddharthChatrolaMs/azure-sdk-for-net
src/SDKs/Authorization/Authorization.Tests/Tests/GraphTestBase.cs
3720
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

using System;
using System.Linq;
using System.Collections.Generic;
using System.Security.Cryptography;
using Microsoft.Azure.Test.HttpRecorder;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Rest.ClientRuntime.Azure.TestFramework;
using System.Net;
using Microsoft.Azure.Graph.RBAC.Models;
using System.Threading;
using Microsoft.Rest.Azure.OData;
using Microsoft.Azure.Graph.RBAC;
using Microsoft.Azure.Management.Authorization;

namespace Authorization.Tests
{
    /// <summary>
    /// Simple holder for an AAD tenant id and the matching user domain.
    /// </summary>
    public class TenantAndDomain
    {
        public TenantAndDomain()
        {
        }

        public string TenantId { get; set; }

        public string Domain { get; set; }
    }

    /// <summary>
    /// Base class for Graph RBAC tests: resolves tenant/domain information
    /// and provides helpers to create/delete users and groups and to manage
    /// group membership through the Graph client.
    /// </summary>
    public class GraphTestBase : TestBase
    {
        // Keys under which tenant/domain are persisted in the HTTP recorder.
        public const string TenantIdKey = "TenantId";
        public const string DomainKey = "Domain";

        // In Record mode reads the values from the live test environment and
        // stores them into the mock server's variables; in Playback mode
        // reads them back from the previously recorded variables.
        public TenantAndDomain GetTenantAndDomain()
        {
            SynchronizationContext.SetSynchronizationContext(new SynchronizationContext());
            TenantAndDomain result = new TenantAndDomain();
            if (HttpMockServer.Mode == HttpRecorderMode.Record)
            {
                var environment = TestEnvironmentFactory.GetTestEnvironment();
                result.TenantId = environment.Tenant;
                // The domain is everything after the '@' of the test user name.
                result.Domain = environment.UserName
                    .Split(new[] { "@" }, StringSplitOptions.RemoveEmptyEntries)
                    .Last();
                HttpMockServer.Variables[TenantIdKey] = result.TenantId;
                HttpMockServer.Variables[DomainKey] = result.Domain;
            }
            else if (HttpMockServer.Mode == HttpRecorderMode.Playback)
            {
                result.TenantId = HttpMockServer.Variables[TenantIdKey];
                result.Domain = HttpMockServer.Variables[DomainKey];
            }
            return result;
        }

        // Builds a Graph RBAC client bound to the resolved tenant id; an
        // optional recording handler can be injected (switched to
        // pass-through so it does not alter traffic).
        public GraphRbacManagementClient GetGraphClient(MockContext context, RecordedDelegatingHandler handler = null)
        {
            if (handler != null)
            {
                handler.IsPassThrough = true;
            }

            var client = handler == null ?
                context.GetGraphServiceClient<GraphRbacManagementClient>() :
                context.GetGraphServiceClient<GraphRbacManagementClient>(handlers: handler);
            client.TenantID = GetTenantAndDomain().TenantId;
            return client;
        }

        // Creates an enabled AAD user "<username>@<domain>" with a fixed
        // test password (not forced to change at next login).
        public User CreateUser(MockContext context,string username)
        {
            string upn = username + "@" + GetTenantAndDomain().Domain;
            UserCreateParameters parameter = new UserCreateParameters();
            parameter.UserPrincipalName = upn;
            parameter.DisplayName = username;
            parameter.AccountEnabled = true;
            parameter.MailNickname = username + "test";
            parameter.PasswordProfile = new PasswordProfile();
            parameter.PasswordProfile.ForceChangePasswordNextLogin = false;
            parameter.PasswordProfile.Password = "Test12345";
            return GetGraphClient(context).Users.Create(parameter);
        }

        // Deletes a user addressed by UPN or object id.
        public void DeleteUser(MockContext context, string upnOrObjectId)
        {
            GetGraphClient(context).Users.Delete(upnOrObjectId);
        }

        // Creates an AAD group with mail nickname "<groupname>tester".
        public ADGroup CreateGroup(MockContext context,string groupname)
        {
            string mailNickName = groupname + "tester";
            GroupCreateParameters parameters = new GroupCreateParameters();
            parameters.DisplayName = groupname;
            parameters.MailNickname = mailNickName;
            return GetGraphClient(context).Groups.Create(parameters);
        }

        // Deletes a group by object id.
        public void DeleteGroup(MockContext context, string objectId)
        {
            GetGraphClient(context).Groups.Delete(objectId);
        }

        // Adds the user to the group; the member is addressed by its full
        // directoryObjects URL as required by the Graph AddMember API.
        public void AddMember(MockContext context, ADGroup group, User user)
        {
            string memberUrl = string.Format(
                "{0}{1}/directoryObjects/{2}",
                GetGraphClient(context).BaseUri.AbsoluteUri, GetGraphClient(context).TenantID, user.ObjectId);
            GetGraphClient(context).Groups.AddMember(group.ObjectId, new GroupAddMemberParameters(memberUrl));
        }
    }
}
apache-2.0
pacharawat/assignmentfacebook
connect-database/src/main/java/com/blogspot/na5cent/connectdb/Q3QueryWithParameters.java
1986
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.blogspot.na5cent.connectdb;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Example of querying with bind parameters: looks up a single employee by id
 * and prints its columns.
 * <p>
 * Uses a {@link PreparedStatement} with a {@code ?} placeholder instead of
 * concatenating the id into the SQL string, which is what the class name
 * advertises and avoids SQL-injection-style string building.
 *
 * @author anonymous
 */
public class Q3QueryWithParameters {

    /** Id of the employee to look up. */
    private static final int EMPLOYEE_ID = 100;

    /**
     * Opens a new connection using the settings from {@link C3DBConfig}.
     *
     * @return a fresh JDBC connection; the caller is responsible for closing it
     * @throws SQLException if the connection cannot be established
     */
    private static Connection getConnection() throws SQLException {
        return DriverManager.getConnection(
                C3DBConfig.getUrl(),
                C3DBConfig.getUsername(),
                C3DBConfig.getPassword()
        );
    }

    public static void main(String[] args) throws ClassNotFoundException, SQLException {
        // Load the JDBC driver class (required by pre-JDBC4 drivers).
        Class.forName(C3DBConfig.getDriver());

        // try-with-resources closes every resource even when an earlier
        // close() throws; the original sequential close() calls in a single
        // finally block could leak the statement/connection in that case.
        try (Connection connection = getConnection();
                PreparedStatement statement = connection.prepareStatement(
                        "SELECT * FROM Employees WHERE employee_id = ?")) {
            statement.setInt(1, EMPLOYEE_ID); // bind the id instead of concatenating it
            try (ResultSet resultSet = statement.executeQuery()) {
                while (resultSet.next()) {
                    System.out.println("employee_id = " + resultSet.getInt("employee_id"));
                    System.out.println("first_name = " + resultSet.getString("first_name"));
                    System.out.println("last_name = " + resultSet.getString("last_name"));
                    System.out.println("email = " + resultSet.getString("email"));
                    System.out.println("------------------------------------------");
                }
            }
        }
    }
}
apache-2.0
darkforestzero/buck
test/com/facebook/buck/jvm/java/ExternalJavacEscaperTest.java
2741
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.jvm.java;

import static org.junit.Assume.assumeTrue;

import com.facebook.buck.io.ExecutableFinder;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.environment.Platform;
import com.google.common.collect.ImmutableMap;

import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;

/**
 * Integration test verifying that buck correctly escapes source paths that
 * contain shell-hostile characters when invoking an external javac: the
 * sources are moved into a directory with a special character and the build
 * must still succeed.
 */
@RunWith(Parameterized.class)
public class ExternalJavacEscaperTest {

  @Rule
  public TemporaryPaths tmp = new TemporaryPaths();

  /**
   * Each case: a human-readable name, a directory name containing a character
   * that needs escaping, and whether the case also runs on Windows
   * (double quotes are not legal in Windows paths, so that case is skipped).
   */
  @Parameterized.Parameters(name = "{0}")
  public static Collection<Object[]> data() {
    return Arrays.asList(
        new Object[][]{
            {"Poundsign", "pound#sign", true},
            {"Whitespace", "space present", true},
            {"SingleQuote", "quote'", true},
            {"DoubleQuote", "double_quote\"", false}
        });
  }

  @Parameterized.Parameter
  public String name;

  @Parameterized.Parameter(value = 1)
  public String badDir;

  @Parameterized.Parameter(value = 2)
  public boolean runOnWindows;

  @Test
  public void testSpecialCharsInSourcePath() throws IOException {
    assumeTrue(runOnWindows || Platform.detect() != Platform.WINDOWS);

    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        this, "external_javac", tmp);
    workspace.setUp();

    // Skip when no javac executable can be located on this machine.
    Path javac = new ExecutableFinder().getExecutable(
        Paths.get("javac"), ImmutableMap.copyOf(System.getenv()));
    assumeTrue(Files.exists(javac));

    workspace.replaceFileContents(".buckconfig", "@JAVAC@", javac.toString());
    // Move the sources under the directory with the special character.
    workspace.move("java", badDir);
    workspace.runBuckCommand("clean").assertSuccess();
    workspace.runBuckCommand(
        "build",
        String.format("//%s/com/example:example", badDir))
        .assertSuccess();
  }
}
apache-2.0
wiltonlazary/arangodb
3rdParty/V8/v7.9.317/test/debugger/debug/regress/regress-1170187.js
3159
// Copyright 2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Make sure that the retreival of local variables are performed correctly even
// when an adapter frame is present.

Debug = debug.Debug

let listenerCalled = false;
let exceptionThrown = false;

// The function under test declares exactly these parameter/local names.
function checkName(name) {
  const validNames = new Set([ 'a', 'b', 'c', 'x', 'y' ]);
  assertTrue(validNames.has(name));
}

// At the break point no local has been assigned yet, so every inspected
// value must still be undefined.
function checkValue(value) {
  assertEquals(void 0, value);
}

// Debug event listener: on a Break event, reads the names and values of the
// first three locals of the topmost frame and validates them. Any exception
// is recorded in exceptionThrown and asserted on at the end of the test.
function listener(event, exec_state, event_data, data) {
  try {
    if (event == Debug.DebugEvent.Break) {
      var local0Name = exec_state.frame(0).localName(0);
      var local1Name = exec_state.frame(0).localName(1);
      var local2Name = exec_state.frame(0).localName(2);
      checkName(local0Name);
      checkName(local1Name);
      checkName(local2Name);
      var local0Value = exec_state.frame(0).localValue(0).value();
      var local1Value = exec_state.frame(0).localValue(1).value();
      var local2Value = exec_state.frame(0).localValue(2).value();
      checkValue(local0Value);
      checkValue(local1Value);
      checkValue(local2Value);
      listenerCalled = true;
    }
  } catch (e) {
    exceptionThrown = true;
  };
};

// Add the debug event listener.
Debug.setListener(listener);

// Call a function with local variables passing a different number parameters
// that the number of arguments.
(function(x,y){
  var a,b,c;
  // Make sure a, b, and c are used.
  a,b,c;
  debugger;
  return 3
})()

// Make sure that the debug event listener vas invoked (again).
assertTrue(listenerCalled);
assertFalse(exceptionThrown, "exception in listener")
apache-2.0
JT5D/blk-game
src/blk/env/blocks/dirtblock.js
2166
/**
 * Copyright 2012 Google, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

goog.provide('blk.env.blocks.DirtBlock');

goog.require('blk.env.BlockFlags');
goog.require('blk.env.Face');
goog.require('blk.env.blocks.BlockID');
goog.require('blk.env.blocks.SolidBlock');
goog.require('blk.env.materials');



/**
 * Dirt block: a breakable, collidable solid block whose face slot depends
 * on whether the block carries the grass attribute bit.
 *
 * @constructor
 * @extends {blk.env.blocks.SolidBlock}
 */
blk.env.blocks.DirtBlock = function() {
  goog.base(this,
      blk.env.blocks.BlockID.DIRT,
      'Dirt',
      blk.env.materials.ground,
      blk.env.BlockFlags.BREAKABLE | blk.env.BlockFlags.COLLIDABLE,
      blk.env.blocks.DirtBlock.DIRT_BOTTOM_);
};
goog.inherits(blk.env.blocks.DirtBlock, blk.env.blocks.SolidBlock);


/**
 * Slot returned for the top face when the grass bit is set.
 * @private
 * @const
 * @type {number}
 */
blk.env.blocks.DirtBlock.GRASS_TOP_ = 0;//98;


/**
 * Slot returned for plain dirt faces.
 * @private
 * @const
 * @type {number}
 */
blk.env.blocks.DirtBlock.DIRT_BOTTOM_ = 2;//160;


/**
 * Slot returned for the four side faces when the grass bit is set.
 * @private
 * @const
 * @type {number}
 */
blk.env.blocks.DirtBlock.MIX_SIDE_ = 3;//114;


/**
 * @override
 */
blk.env.blocks.DirtBlock.prototype.getFaceSlot = function(x, y, z, face,
    light, attrs) {
  var grassy = !!(attrs & 0x1);
  if (!grassy) {
    // Without the grass bit every face uses the plain dirt slot.
    return blk.env.blocks.DirtBlock.DIRT_BOTTOM_;
  }
  if (face === blk.env.Face.POS_Y) {
    return blk.env.blocks.DirtBlock.GRASS_TOP_;
  }
  if (face === blk.env.Face.POS_X || face === blk.env.Face.NEG_X ||
      face === blk.env.Face.POS_Z || face === blk.env.Face.NEG_Z) {
    return blk.env.blocks.DirtBlock.MIX_SIDE_;
  }
  // NEG_Y (and any unexpected face value) falls back to plain dirt.
  return blk.env.blocks.DirtBlock.DIRT_BOTTOM_;
};
apache-2.0
wikimedia/phabricator
src/infrastructure/customfield/standard/PhabricatorStandardCustomFieldText.php
1973
<?php

/**
 * Standard custom field storing a single free-form text value.
 *
 * Adds string indexing for ApplicationSearch, a plain text control in
 * search forms, and the string-oriented Herald conditions.
 */
final class PhabricatorStandardCustomFieldText
  extends PhabricatorStandardCustomField {

  public function getFieldType() {
    return 'text';
  }

  public function buildFieldIndexes() {
    // Only index the value when it is non-empty.
    $indexes = array();

    $value = $this->getFieldValue();
    if (strlen($value)) {
      $indexes[] = $this->newStringIndex($value);
    }

    return $indexes;
  }

  public function readApplicationSearchValueFromRequest(
    PhabricatorApplicationSearchEngine $engine,
    AphrontRequest $request) {

    return $request->getStr($this->getFieldKey());
  }

  public function applyApplicationSearchConstraintToQuery(
    PhabricatorApplicationSearchEngine $engine,
    PhabricatorCursorPagedPolicyAwareQuery $query,
    $value) {

    // An empty search input applies no constraint at all.
    if (strlen($value)) {
      $query->withApplicationSearchContainsConstraint(
        $this->newStringIndex(null),
        $value);
    }
  }

  public function appendToApplicationSearchForm(
    PhabricatorApplicationSearchEngine $engine,
    AphrontFormView $form,
    $value) {

    $form->appendChild(
      id(new AphrontFormTextControl())
        ->setLabel($this->getFieldName())
        ->setName($this->getFieldKey())
        ->setValue($value));
  }

  public function shouldAppearInHerald() {
    return true;
  }

  public function getHeraldFieldConditions() {
    // Text fields support the full set of string comparisons in Herald.
    return array(
      HeraldAdapter::CONDITION_CONTAINS,
      HeraldAdapter::CONDITION_NOT_CONTAINS,
      HeraldAdapter::CONDITION_IS,
      HeraldAdapter::CONDITION_IS_NOT,
      HeraldAdapter::CONDITION_REGEXP,
      HeraldAdapter::CONDITION_NOT_REGEXP,
    );
  }

  public function getHeraldFieldStandardType() {
    return HeraldField::STANDARD_TEXT;
  }

  protected function getHTTPParameterType() {
    return new AphrontStringHTTPParameterType();
  }

  public function getConduitEditParameterType() {
    return new ConduitStringParameterType();
  }

  protected function newExportFieldType() {
    return new PhabricatorStringExportField();
  }

}
apache-2.0
mbroadst/debian-qpid-proton
proton-j/src/main/resources/cssl.py
3207
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

# Jython shim adapting the proton-j Java SSL API to the proton-c style
# pn_ssl_* function interface used by the Python tests.

from org.apache.qpid.proton import Proton
from org.apache.qpid.proton.engine import SslDomain

from cerror import *

# from proton/ssl.h
PN_SSL_MODE_CLIENT = 1
PN_SSL_MODE_SERVER = 2

PN_SSL_RESUME_UNKNOWN = 0
PN_SSL_RESUME_NEW = 1
PN_SSL_RESUME_REUSED = 2

PN_SSL_VERIFY_NULL=0
PN_SSL_VERIFY_PEER=1
PN_SSL_ANONYMOUS_PEER=2
PN_SSL_VERIFY_PEER_NAME=3

# Mappings between the C-style integer mode constants above and the Java
# SslDomain.Mode enum values (J2P: Java to proton-c, P2J: the reverse).
PN_SSL_MODE_J2P = {
    SslDomain.Mode.CLIENT: PN_SSL_MODE_CLIENT,
    SslDomain.Mode.SERVER: PN_SSL_MODE_SERVER
}

PN_SSL_MODE_P2J = {
    PN_SSL_MODE_CLIENT: SslDomain.Mode.CLIENT,
    PN_SSL_MODE_SERVER: SslDomain.Mode.SERVER
}

def pn_ssl_present():
    # proton-j always ships with SSL support compiled in.
    return True

def pn_ssl_domain(mode):
    # Creates and initialises an SslDomain for the given client/server mode.
    domain = Proton.sslDomain()
    domain.init(PN_SSL_MODE_P2J[mode])
    return domain

def pn_ssl_domain_set_credentials(domain, certificate_file, private_key_file, password):
    domain.setCredentials(certificate_file, private_key_file, password)
    return 0

def pn_ssl_domain_set_trusted_ca_db(domain, trusted_db):
    domain.setTrustedCaDb(trusted_db)
    return 0

# Mappings between the C-style verification constants and the Java
# SslDomain.VerifyMode enum values.
PN_VERIFY_MODE_J2P = {
    None: PN_SSL_VERIFY_NULL,
    SslDomain.VerifyMode.VERIFY_PEER: PN_SSL_VERIFY_PEER,
    SslDomain.VerifyMode.VERIFY_PEER_NAME: PN_SSL_VERIFY_PEER_NAME,
    SslDomain.VerifyMode.ANONYMOUS_PEER: PN_SSL_ANONYMOUS_PEER
}

PN_VERIFY_MODE_P2J = {
    PN_SSL_VERIFY_NULL: None,
    PN_SSL_VERIFY_PEER: SslDomain.VerifyMode.VERIFY_PEER,
    PN_SSL_VERIFY_PEER_NAME: SslDomain.VerifyMode.VERIFY_PEER_NAME,
    PN_SSL_ANONYMOUS_PEER: SslDomain.VerifyMode.ANONYMOUS_PEER
}

def pn_ssl_domain_set_peer_authentication(domain, mode, trusted=None):
    # Sets the verification mode and, optionally, the trusted CA database.
    domain.setPeerAuthentication(PN_VERIFY_MODE_P2J[mode])
    if trusted:
        domain.setTrustedCaDb(trusted)
    return 0

def pn_ssl_domain_allow_unsecured_client(domain):
    domain.allowUnsecuredClient(True)
    return 0

class pn_ssl_wrapper:
    # Pairs a transport with its lazily-created SSL object (impl is filled
    # in by pn_ssl_init).

    def __init__(self, transport):
        self.impl = None
        self.transport = transport

def pn_ssl(transport):
    # Returns the wrapper cached on the transport, creating it on first use.
    if getattr(transport, "ssl", None) is not None:
        return transport.ssl
    else:
        transport.ssl = pn_ssl_wrapper(transport)
        return transport.ssl

def pn_ssl_init(ssl, domain, session_id):
    # XXX: session_id
    ssl.impl = ssl.transport.impl.ssl(domain, None)

def pn_ssl_resume_status(ssl):
    # Not supported by this shim; Skipped (presumably from cerror) signals
    # the test harness to skip the check -- TODO confirm.
    raise Skipped()

def pn_ssl_get_cipher_name(ssl, size):
    # Returns (found, name); found is False when no cipher is negotiated.
    name = ssl.impl.getCipherName()
    return (bool(name), name)

def pn_ssl_get_protocol_name(ssl, size):
    # Returns (found, name); found is False when no protocol is negotiated.
    name = ssl.impl.getProtocolName()
    return (bool(name), name)
apache-2.0
adeshr/qds-sdk-py
qds_sdk/group.py
9599
import json

from qds_sdk.qubole import Qubole
from qds_sdk.resource import Resource
from argparse import ArgumentParser


class GroupCmdLine:
    """
    qds_sdk.GroupCmdLine is the interface used by qds.py.
    """

    @staticmethod
    def parsers():
        # Builds the "qds.py group" parser: one sub-command per group
        # operation, each wired via set_defaults(func=...) to its handler.
        argparser = ArgumentParser(
            prog="qds.py group",
            description="Client to manage Groups in Qubole Data Service.")
        subparsers = argparser.add_subparsers()

        #Create
        create = subparsers.add_parser("create", help="Creates a new Group")
        create.add_argument("--name", dest="name", required=True,
                            help="Name of the new Group.")
        create.add_argument("--members", dest="members",
                            help="List of User ids to be added in this new Group.")
        create.add_argument("--roles", dest="roles",
                            help="List of Role ids to be attached with this new Group.")
        create.set_defaults(func=GroupCmdLine.create)

        #List
        list = subparsers.add_parser("list", help="List all Groups")
        list.add_argument("--per-page", dest="per_page",
                          help="Number of items per page")
        list.add_argument("--page", dest="page", help="Page Number")
        list.set_defaults(func=GroupCmdLine.list)

        #View
        view = subparsers.add_parser("view", help="View a specific Group")
        view.add_argument("id", help="Numeric id of the Group")
        view.set_defaults(func=GroupCmdLine.view)

        #Update
        update = subparsers.add_parser("update", help="Update a specific Group's name")
        update.add_argument("--name", dest="name", help="New name of the Group")
        update.add_argument("--members", dest="members",
                            help="List of User ids to be added in this Group")
        update.add_argument("--roles", dest="roles",
                            help="List of Role ids to be attached with this Group.")
        update.add_argument("--remove-members", dest="remove_members",
                            help="List of User ids to be removed from this Group")
        update.add_argument("--remove-roles", dest="remove_roles",
                            help="List of Role ids to be detached from this Group")
        update.add_argument("id", help="Numeric id of the Group")
        update.set_defaults(func=GroupCmdLine.update)

        #Delete
        delete = subparsers.add_parser("delete", help="Delete a Group")
        delete.add_argument("id", help="Numeric id of the Group to be deleted")
        delete.set_defaults(func=GroupCmdLine.delete)

        #duplicate
        duplicate = subparsers.add_parser("duplicate", help="Duplicates/Clones a group")
        duplicate.add_argument("id", help="Numeric id of the Group to be Cloned")
        duplicate.add_argument("--name", dest="name", required=False,
                               help="Name of the new group")
        duplicate.set_defaults(func=GroupCmdLine.duplicate)

        #Add user
        add_user = subparsers.add_parser("add-users", help="Add Users to the Group")
        add_user.add_argument("id", help="Numeric id of the group")
        add_user.add_argument("user_ids",
                              help="User IDs of the Users to be added in this Group")
        add_user.set_defaults(func=GroupCmdLine.add_users)

        #Remove user
        remove_user = subparsers.add_parser("remove-users",
                                            help="Remove Users from the Group")
        remove_user.add_argument("id", help="Numeric id of the group")
        remove_user.add_argument("user_ids",
                                 help="User IDs of the Users to be removed from this Group")
        remove_user.set_defaults(func=GroupCmdLine.remove_users)

        #List roles for a group
        list_roles = subparsers.add_parser("list-roles",
                                           help="List all Roles of a Group ")
        list_roles.add_argument("id", help="Numeric id of the group")
        list_roles.set_defaults(func=GroupCmdLine.list_roles)

        #Add role
        add_role = subparsers.add_parser("add-roles", help="Attach Roles to the Group")
        add_role.add_argument("id", help="Numeric id of the group")
        add_role.add_argument("role_id",
                              help="Please provide the Role IDs, which you likes to attach with this Group.")
        add_role.set_defaults(func=GroupCmdLine.add_roles)

        #Remove role
        remove_role = subparsers.add_parser("remove-roles",
                                            help="Detach Roles from the Group")
        remove_role.add_argument("id", help="Numeric id of the group")
        remove_role.add_argument("role_id",
                                 help="Please provide the Role IDs, which you likes to detach from this Group.")
        remove_role.set_defaults(func=GroupCmdLine.remove_roles)

        #List users for a group
        list_users = subparsers.add_parser("list-users",
                                           help="List all Users of a Group ")
        list_users.add_argument("id", help="Numeric id of the group")
        list_users.set_defaults(func=GroupCmdLine.list_users)

        return argparser

    @staticmethod
    def run(args):
        # Parses CLI arguments and dispatches to the chosen sub-command.
        parser = GroupCmdLine.parsers()
        parsed = parser.parse_args(args)
        return parsed.func(parsed)

    @staticmethod
    def create(args):
        group = Group.create(name=args.name, members=args.members, roles=args.roles)
        return json.dumps(group.attributes, sort_keys=True, indent=4)

    @staticmethod
    def list(args):
        grouplist = Group.list(args.page, args.per_page)
        return json.dumps(grouplist, default=lambda o: o.attributes,
                          sort_keys=True, indent=4)

    @staticmethod
    def view(args):
        group = Group.find(args.id)
        return json.dumps(group.attributes, sort_keys=True, indent=4)

    @staticmethod
    def update(args):
        # Only forward the options the user actually supplied.
        options = {}
        if args.name is not None:
            options["name"] = args.name
        if args.members is not None:
            options["members"] = args.members
        if args.roles is not None:
            options["roles"] = args.roles
        if args.remove_members is not None:
            options["removed_members"] = args.remove_members
        if args.remove_roles is not None:
            options["removed_roles"] = args.remove_roles
        return json.dumps(Group.update(args.id, **options), sort_keys=True, indent=4)

    @staticmethod
    def delete(args):
        return json.dumps(Group.delete(args.id), sort_keys=True, indent=4)

    @staticmethod
    def add_users(args):
        options = {}
        if args.user_ids is not None:
            options["members"] = args.user_ids
        return json.dumps(Group.update(args.id, **options), sort_keys=True, indent=4)

    @staticmethod
    def remove_users(args):
        options = {}
        if args.user_ids is not None:
            options["removed_members"] = args.user_ids
        return json.dumps(Group.update(args.id, **options), sort_keys=True, indent=4)

    @staticmethod
    def duplicate(args):
        options = {}
        if args.name is not None:
            options["name"] = args.name
        return json.dumps(Group.duplicate(args.id, **options), sort_keys=True, indent=4)

    @staticmethod
    def list_roles(args):
        return json.dumps(Group.list_roles(args.id), sort_keys=True, indent=4)

    @staticmethod
    def list_users(args):
        return json.dumps(Group.list_users(args.id), sort_keys=True, indent=4)


class Group(Resource):
    """
    qds_sdk.Group is the base Qubole Group class.
    """

    """ all commands use the /group endpoint"""

    rest_entity_path = "groups"

    @staticmethod
    def list(page = None, per_page = None):
        # Optional pagination values are passed through as query parameters.
        conn = Qubole.agent()
        url_path = Group.rest_entity_path
        page_attr = []
        if page is not None:
            page_attr.append("page=%s" % page)
        if per_page is not None:
            page_attr.append("per_page=%s" % per_page)
        if page_attr:
            url_path = "%s?%s" % (Group.rest_entity_path, "&".join(page_attr))

        groupjson = conn.get(url_path)
        grouplist = []
        for s in groupjson["groups"]:
            grouplist.append(Group(s))
        return grouplist

    @staticmethod
    def update(group_id, **kwargs):
        conn = Qubole.agent()
        url_path = "groups/%s" % group_id
        return conn.put(url_path, data=kwargs)

    @staticmethod
    def delete(group_id):
        conn = Qubole.agent()
        url_path = "groups/%s" % group_id
        return conn.delete(url_path)

    @staticmethod
    def add_user(group_id, user_id):
        conn = Qubole.agent()
        url_path = "groups/%s/qbol_users/%s/add" % (group_id, user_id)
        return conn.put(url_path)

    @staticmethod
    def remove_user(group_id, user_id):
        conn = Qubole.agent()
        url_path = "groups/%s/qbol_users/%s/remove" % (group_id, user_id)
        return conn.put(url_path)

    @staticmethod
    def duplicate(group_id, **kwargs):
        conn = Qubole.agent()
        url_path = "groups/%s/duplicate" % group_id
        return conn.post(url_path, data=kwargs)

    @staticmethod
    def list_roles(group_id):
        conn = Qubole.agent()
        url_path = "groups/%s/roles" % group_id
        return conn.get(url_path)

    @staticmethod
    def list_users(group_id):
        conn = Qubole.agent()
        url_path = "groups/%s/qbol_users" % group_id
        return conn.get(url_path)
apache-2.0
zsyzsyhao/zstack
sdk/src/main/java/org/zstack/sdk/CreateEipResult.java
269
package org.zstack.sdk;

/**
 * Result of a CreateEip SDK request: wraps the {@link EipInventory}
 * describing the created resource.
 */
public class CreateEipResult {
    /** Inventory record of the created EIP. */
    public EipInventory inventory;

    public void setInventory(EipInventory value) {
        inventory = value;
    }

    public EipInventory getInventory() {
        return inventory;
    }
}
apache-2.0
rmarting/camel
platforms/spring-boot/components-starter/camel-aws-starter/src/main/java/org/apache/camel/component/aws/kms/springboot/KMSComponentAutoConfiguration.java
6086
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.aws.kms.springboot;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import org.apache.camel.CamelContext;
import org.apache.camel.component.aws.kms.KMSComponent;
import org.apache.camel.spi.ComponentCustomizer;
import org.apache.camel.spi.HasId;
import org.apache.camel.spring.boot.CamelAutoConfiguration;
import org.apache.camel.spring.boot.ComponentConfigurationProperties;
import org.apache.camel.spring.boot.util.CamelPropertiesHelper;
import org.apache.camel.spring.boot.util.ConditionalOnCamelContextAndAutoConfigurationBeans;
import org.apache.camel.spring.boot.util.GroupCondition;
import org.apache.camel.spring.boot.util.HierarchicalPropertiesEvaluator;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;

/**
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@Configuration
@Conditional({ConditionalOnCamelContextAndAutoConfigurationBeans.class,
        KMSComponentAutoConfiguration.GroupConditions.class})
@AutoConfigureAfter(CamelAutoConfiguration.class)
@EnableConfigurationProperties({ComponentConfigurationProperties.class,
        KMSComponentConfiguration.class})
public class KMSComponentAutoConfiguration {

    private static final Logger LOGGER = LoggerFactory
            .getLogger(KMSComponentAutoConfiguration.class);
    @Autowired
    private ApplicationContext applicationContext;
    @Autowired
    private CamelContext camelContext;
    @Autowired
    private KMSComponentConfiguration configuration;
    @Autowired(required = false)
    private List<ComponentCustomizer<KMSComponent>> customizers;

    // Enables this auto-configuration only for the matching property groups.
    static class GroupConditions extends GroupCondition {
        public GroupConditions() {
            super("camel.component", "camel.component.aws-kms");
        }
    }

    // Builds the aws-kms component bean: copies all configuration
    // properties onto a fresh component, expanding nested configuration
    // holders, then applies any registered (and enabled) customizers.
    @Lazy
    @Bean(name = "aws-kms-component")
    @ConditionalOnMissingBean(KMSComponent.class)
    public KMSComponent configureKMSComponent() throws Exception {
        KMSComponent component = new KMSComponent();
        component.setCamelContext(camelContext);
        Map<String, Object> parameters = new HashMap<>();
        IntrospectionSupport.getProperties(configuration, parameters, null,
                false);
        for (Map.Entry<String, Object> entry : parameters.entrySet()) {
            Object value = entry.getValue();
            Class<?> paramClass = value.getClass();
            if (paramClass.getName().endsWith("NestedConfiguration")) {
                // Nested configuration objects are replaced by an instance of
                // their CAMEL_NESTED_CLASS with the same properties applied.
                Class nestedClass = null;
                try {
                    nestedClass = (Class) paramClass.getDeclaredField(
                            "CAMEL_NESTED_CLASS").get(null);
                    HashMap<String, Object> nestedParameters = new HashMap<>();
                    IntrospectionSupport.getProperties(value, nestedParameters,
                            null, false);
                    Object nestedProperty = nestedClass.newInstance();
                    CamelPropertiesHelper.setCamelProperties(camelContext,
                            nestedProperty, nestedParameters, false);
                    entry.setValue(nestedProperty);
                } catch (NoSuchFieldException e) {
                    // No CAMEL_NESTED_CLASS field: value is left unchanged.
                }
            }
        }
        CamelPropertiesHelper.setCamelProperties(camelContext, component,
                parameters, false);
        if (ObjectHelper.isNotEmpty(customizers)) {
            for (ComponentCustomizer<KMSComponent> customizer : customizers) {
                // A customizer is applied unless disabled through the
                // camel.component[.aws-kms].customizer properties.
                boolean useCustomizer = (customizer instanceof HasId)
                        ? HierarchicalPropertiesEvaluator.evaluate(
                                applicationContext.getEnvironment(),
                                "camel.component.customizer",
                                "camel.component.aws-kms.customizer",
                                ((HasId) customizer).getId())
                        : HierarchicalPropertiesEvaluator.evaluate(
                                applicationContext.getEnvironment(),
                                "camel.component.customizer",
                                "camel.component.aws-kms.customizer");
                if (useCustomizer) {
                    LOGGER.debug("Configure component {}, with customizer {}",
                            component, customizer);
                    customizer.customize(component);
                }
            }
        }
        return component;
    }
}
apache-2.0
ionelmc/python-cookiepatcher
ci/bootstrap.py
2886
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import os import subprocess import sys from os.path import abspath from os.path import dirname from os.path import exists from os.path import join base_path = dirname(dirname(abspath(__file__))) def check_call(args): print("+", *args) subprocess.check_call(args) def exec_in_env(): env_path = join(base_path, ".tox", "bootstrap") if sys.platform == "win32": bin_path = join(env_path, "Scripts") else: bin_path = join(env_path, "bin") if not exists(env_path): import subprocess print("Making bootstrap env in: {0} ...".format(env_path)) try: check_call([sys.executable, "-m", "venv", env_path]) except subprocess.CalledProcessError: try: check_call([sys.executable, "-m", "virtualenv", env_path]) except subprocess.CalledProcessError: check_call(["virtualenv", env_path]) print("Installing `jinja2` into bootstrap environment...") check_call([join(bin_path, "pip"), "install", "jinja2", "tox"]) python_executable = join(bin_path, "python") if not os.path.exists(python_executable): python_executable += '.exe' print("Re-executing with: {0}".format(python_executable)) print("+ exec", python_executable, __file__, "--no-env") os.execv(python_executable, [python_executable, __file__, "--no-env"]) def main(): import jinja2 print("Project path: {0}".format(base_path)) jinja = jinja2.Environment( loader=jinja2.FileSystemLoader(join(base_path, "ci", "templates")), trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True ) tox_environments = [ line.strip() # 'tox' need not be installed globally, but must be importable # by the Python that is running this script. # This uses sys.executable the same way that the call in # cookiecutter-pylibrary/hooks/post_gen_project.py # invokes this bootstrap.py itself. 
for line in subprocess.check_output([sys.executable, '-m', 'tox', '--listenvs'], universal_newlines=True).splitlines() ] tox_environments = [line for line in tox_environments if line.startswith('py')] for name in os.listdir(join("ci", "templates")): with open(join(base_path, name), "w") as fh: fh.write(jinja.get_template(name).render(tox_environments=tox_environments)) print("Wrote {}".format(name)) print("DONE.") if __name__ == "__main__": args = sys.argv[1:] if args == ["--no-env"]: main() elif not args: exec_in_env() else: print("Unexpected arguments {0}".format(args), file=sys.stderr) sys.exit(1)
bsd-2-clause
bdhess/homebrew-cask
Casks/maltego-classic.rb
391
# Homebrew-Cask definition for the Paterva Maltego Classic client.
cask 'maltego-classic' do
  version '4.0.8.9246'
  sha256 '9a36647f54760b0bba40634d19184470d5cdd073d3e4d772fdac3281c6b955fa'

  # The download path embeds the major version ("malv4") and the full
  # version string in the DMG file name.
  url "https://www.paterva.com/malv#{version.major}/classic/Maltego.v#{version}.dmg"
  name 'Paterva Maltego'
  homepage 'https://www.paterva.com/web7/buy/maltego-clients/maltego.php'
  license :closed

  # The app bundle name uses only the major.minor.patch portion.
  app "Maltego Classic v#{version.major_minor_patch}.app"
end
bsd-2-clause