repo_name
stringlengths
4
116
path
stringlengths
3
942
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
novaramedia/novaramedia-com
vendor/fightbulc/moment/src/Locales/en_GB.php
2478
<?php

// locale: great britain english (en-gb)
// author: Chris Gedrim https://github.com/chrisgedrim
//
// Locale definition consumed by fightbulc/moment. Returns an array of month /
// weekday names, calendar and relative-time format strings, an ordinal-suffix
// closure and custom PHP date() format strings.
return array(
    "months"           => explode('_', 'January_February_March_April_May_June_July_August_September_October_November_December'),
    "monthsNominative" => explode('_', 'January_February_March_April_May_June_July_August_September_October_November_December'),
    "monthsShort"      => explode('_', 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'),
    "weekdays"         => explode('_', 'Monday_Tuesday_Wednesday_Thursday_Friday_Saturday_Sunday'),
    "weekdaysShort"    => explode('_', 'Mon_Tue_Wed_Thu_Fri_Sat_Sun'),
    "calendar"         => array(
        "sameDay"  => '[Today]',
        "nextDay"  => '[Tomorrow]',
        "lastDay"  => '[Yesterday]',
        "lastWeek" => '[Last] l',
        "sameElse" => 'l',
        "withTime" => '[at] H:i',
        "default"  => 'd/m/Y',
    ),
    "relativeTime" => array(
        "future" => 'in %s',
        "past"   => '%s ago',
        "s"      => 'a few seconds',
        "ss"     => '%d seconds',
        "m"      => 'a minute',
        "mm"     => '%d minutes',
        "h"      => 'an hour',
        "hh"     => '%d hours',
        "d"      => 'a day',
        "dd"     => '%d days',
        "M"      => 'a month',
        "MM"     => '%d months',
        "y"      => 'a year',
        "yy"     => '%d years',
    ),
    // English ordinal suffix: 11th-13th are special-cased (they do NOT follow
    // the last-digit rule); otherwise the suffix is keyed on the last digit
    // (1st, 2nd, 3rd, everything else th).
    "ordinal" => function ($number) {
        $n = $number % 100;
        $ends = array('th', 'st', 'nd', 'rd', 'th', 'th', 'th', 'th', 'th', 'th');
        if ($n >= 11 && $n <= 13) {
            return $number . '[th]';
        }
        return $number . '[' . $ends[$number % 10] . ']';
    },
    "week" => array(
        "dow" => 1, // Monday is the first day of the week.
        "doy" => 4  // The week that contains Jan 4th is the first week of the year.
    ),
    "customFormats" => array(
        "LT"   => "G:i",          // 22:00
        "LTS"  => "G:i:s",        // 22:00:00
        "L"    => "d/m/Y",        // 12/06/2010
        "l"    => "j/n/Y",        // 12/6/2010
        "LL"   => "j F Y",        // 12 June 2010
        "ll"   => "j M Y",        // 12 Jun 2010
        "LLL"  => "j F Y G:i",    // 12 June 2010 22:00
        "lll"  => "j M Y G:i",    // 12 Jun 2010 22:00
        // FIX: was "l, j F F Y G:i", which emitted the full month name twice
        // ("Saturday, 12 June June 2010 22:00").
        "LLLL" => "l, j F Y G:i", // Saturday, 12 June 2010 22:00
        "llll" => "D, j M Y G:i", // Sat, 12 Jun 2010 22:00
    ),
);
apache-2.0
chtyim/cdap
cdap-api/src/main/java/co/cask/cdap/api/dataset/lib/TimePartitionedFileSet.java
4051
/*
 * Copyright © 2014-2015 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package co.cask.cdap.api.dataset.lib;

import co.cask.cdap.api.annotation.Beta;
import co.cask.cdap.api.dataset.DataSetException;

import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;

/**
 * Represents a dataset that is split into partitions that can be uniquely addressed
 * by time. Each partition is a path in a file set, with a timestamp attached as meta data.
 * The timestamp is mapped to a partition key of a {@link co.cask.cdap.api.dataset.lib.PartitionedFileSet}
 * with five integer partitioning fields: the year, month, day, hour and minute. Partitions can
 * be retrieved using time range or using a {@link co.cask.cdap.api.dataset.lib.PartitionFilter}.
 * <p>
 * The granularity of time is in minutes, that is, any seconds or milliseconds after the
 * full minute is ignored for the partition keys. That means, there can not be be two partitions
 * in the same minute. Also, when retrieving partitions via time or time range using
 * {@link #getPartitionByTime}, {@link #getPartitionsByTime}, or when writing a partition using
 * {@link #getPartitionOutput}, the seconds and milliseconds on the
 * time or time range are ignored.
 * <p>
 * This dataset can be made available for querying with SQL (explore). This is enabled through dataset
 * properties when the dataset is created. See {@link co.cask.cdap.api.dataset.lib.FileSetProperties}
 * for details. If it is enabled for explore, a Hive external table will be created when the dataset is
 * created. The Hive table is partitioned by year, month, day, hour and minute.
 */
@Beta
public interface TimePartitionedFileSet extends PartitionedFileSet {

  /**
   * Add a partition for a given time, stored at a given path (relative to the file set's base path).
   */
  void addPartition(long time, String path);

  /**
   * Add a partition for a given time, stored at a given path (relative to the file set's base path),
   * with given metadata.
   */
  void addPartition(long time, String path, Map<String, String> metadata);

  /**
   * Adds a new metadata entry for a particular partition.
   * Note that existing entries can not be updated.
   *
   * @throws DataSetException in case an attempt is made to update existing entries.
   */
  void addMetadata(long time, String metadataKey, String metadataValue);

  /**
   * Adds a set of new metadata entries for a particular partition.
   * Note that existing entries can not be updated.
   *
   * @throws DataSetException in case an attempt is made to update existing entries.
   */
  void addMetadata(long time, Map<String, String> metadata);

  /**
   * Remove a partition for a given time.
   */
  void dropPartition(long time);

  /**
   * Return the partition associated with the given time, rounded to the minute;
   * or null if no such partition exists.
   */
  @Nullable
  TimePartitionDetail getPartitionByTime(long time);

  /**
   * Return all partitions within the time range given by startTime (inclusive) and endTime (exclusive),
   * both rounded to the full minute.
   */
  Set<TimePartitionDetail> getPartitionsByTime(long startTime, long endTime);

  /**
   * Return a partition output for a specific time, rounded to the minute, in preparation for creating a new partition.
   * Obtain the location to write from the PartitionOutput, then call the {@link PartitionOutput#addPartition}
   * to add the partition to this dataset.
   */
  TimePartitionOutput getPartitionOutput(long time);
}
apache-2.0
RyanTech/NewBeyondViewPager
library/src/main/java/net/soulwolf/newbeyondviewpager/NewBeyondViewPager.java
2071
/*******************************************************************************
 * Copyright 2015-2019 Toaker NewBeyondViewPager
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package net.soulwolf.newbeyondviewpager;

import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build;
import android.util.AttributeSet;
import android.view.ViewGroup;

import com.toaker.common.tlog.TLog;

/**
 * Decorator for NewBeyondViewPager.
 *
 * <p>NOTE(review): {@link #onLayout} only logs the layout bounds and never
 * positions any child views, so children added to this ViewGroup will not be
 * laid out — presumably a work-in-progress stub; confirm intended behavior.
 *
 * @author Toaker [Toaker]([email protected])
 *         [Toaker](http://www.toaker.com)
 * @Time Create by 2015/5/14 9:38
 */
public class NewBeyondViewPager extends ViewGroup {

    // Compile-time switch for verbose layout logging via TLog.
    private static final boolean DEBUG = true;

    private static final String LOG_TAG = "NewBeyondViewPager:";

    public NewBeyondViewPager(Context context) {
        super(context);
    }

    public NewBeyondViewPager(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public NewBeyondViewPager(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }

    // Four-argument constructor (defStyleRes) only exists on API 21+.
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public NewBeyondViewPager(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        // Logs the assigned bounds only; no children are positioned here.
        if(DEBUG){
            TLog.d(LOG_TAG,"onLayout: %s %s %s %s",l,t,r,b);
        }
    }
}
apache-2.0
q474818917/solr-5.2.0
lucene/analysis/common/src/test/org/apache/lucene/analysis/hunspell/TestHunspellStemFilter.java
5177
package org.apache.lucene.analysis.hunspell;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Collections;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.hunspell.Dictionary;
import org.apache.lucene.analysis.hunspell.HunspellStemFilter;
import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.util.IOUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;

/** Tests for {@link HunspellStemFilter}, driven by the simple.aff/simple.dic fixtures. */
public class TestHunspellStemFilter extends BaseTokenStreamTestCase {

  /** Shared dictionary built once per class; released in {@link #afterClass()}. */
  private static Dictionary dictionary;

  @BeforeClass
  public static void beforeClass() throws Exception {
    // no multiple try-with to workaround bogus VerifyError
    InputStream affix = TestStemmer.class.getResourceAsStream("simple.aff");
    InputStream words = TestStemmer.class.getResourceAsStream("simple.dic");
    try {
      dictionary = new Dictionary(affix, words);
    } finally {
      IOUtils.closeWhileHandlingException(affix, words);
    }
  }

  @AfterClass
  public static void afterClass() {
    // Allow the dictionary to be garbage-collected between test classes.
    dictionary = null;
  }

  /** Simple test for KeywordAttribute: keyword-marked terms must not be stemmed. */
  public void testKeywordAttribute() throws IOException {
    MockTokenizer stream = whitespaceMockTokenizer("lucene is awesome");
    stream.setEnableChecks(true);
    HunspellStemFilter stemmer = new HunspellStemFilter(stream, dictionary);
    // "lucene" produces a stacked stem "lucen" (posInc 0) alongside the original.
    assertTokenStreamContents(stemmer, new String[]{"lucene", "lucen", "is", "awesome"}, new int[] {1, 0, 1, 1});

    // With the keyword marker in front, "lucene" passes through unstemmed.
    stream = whitespaceMockTokenizer("lucene is awesome");
    CharArraySet keywords = new CharArraySet( Arrays.asList("Lucene"), true);
    stemmer = new HunspellStemFilter(new SetKeywordMarkerFilter(stream, keywords), dictionary);
    assertTokenStreamContents(stemmer, new String[]{"lucene", "is", "awesome"}, new int[] {1, 1, 1});
  }

  /** simple test for longestOnly option: only the longest stem survives. */
  public void testLongestOnly() throws IOException {
    MockTokenizer stream = whitespaceMockTokenizer("lucene is awesome");
    stream.setEnableChecks(true);
    HunspellStemFilter stemmer = new HunspellStemFilter(stream, dictionary, true, true);
    assertTokenStreamContents(stemmer, new String[]{"lucene", "is", "awesome"}, new int[] {1, 1, 1});
  }

  /** blast some random strings through the analyzer */
  public void testRandomStrings() throws Exception {
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        Tokenizer t = new MockTokenizer(MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(t, new HunspellStemFilter(t, dictionary));
      }
    };
    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
    a.close();
  }

  /** The filter must pass an empty term through untouched. */
  public void testEmptyTerm() throws IOException {
    Analyzer analyzer = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        Tokenizer t = new KeywordTokenizer();
        return new TokenStreamComponents(t, new HunspellStemFilter(t, dictionary));
      }
    };
    checkOneTerm(analyzer, "", "");
    analyzer.close();
  }

  /** A case-insensitive dictionary must not lowercase the emitted tokens. */
  public void testIgnoreCaseNoSideEffects() throws Exception {
    final Dictionary dict;
    // no multiple try-with to workaround bogus VerifyError
    InputStream affix = TestStemmer.class.getResourceAsStream("simple.aff");
    InputStream words = TestStemmer.class.getResourceAsStream("simple.dic");
    try {
      dict = new Dictionary(affix, Collections.singletonList(words), true);
    } finally {
      IOUtils.closeWhileHandlingException(affix, words);
    }
    Analyzer analyzer = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        Tokenizer t = new KeywordTokenizer();
        return new TokenStreamComponents(t, new HunspellStemFilter(t, dict));
      }
    };
    checkOneTerm(analyzer, "NoChAnGy", "NoChAnGy");
    analyzer.close();
  }
}
apache-2.0
srinivasans/educloud
others/editor/Dialogs/colorpicker.php
3928
<?php
// Web-safe-palette tab of the editor's color-picker dialog. Renders a 12x18
// grid of the 216 web-safe colors (generated client-side in JavaScript) plus
// a hex input, with sibling tabs linking to the named-color and custom-color
// variants of the dialog.
error_reporting(E_ALL ^ E_NOTICE);
include_once("Include_GetString.php") ;
$Theme="Office2007";
?>
<html xmlns="http://www.w3.org/1999/xhtml">
<head ID="Head1">
<title><?php echo GetString("MoreColors") ; ?></title>
<meta http-equiv="Page-Enter" content="blendTrans(Duration=0.1)" />
<meta http-equiv="Page-Exit" content="blendTrans(Duration=0.1)" />
<script type="text/javascript" src="../Scripts/Dialog/DialogHead.js"></script>
<script type="text/javascript" src="../Scripts/Dialog/Dialog_ColorPicker.js"></script>
<link href="../Themes/<?php echo $Theme; ?>/dialog.css" type="text/css" rel="stylesheet" />
<style type="text/css">
/* NOTE(review): cursor:hand is a non-standard IE-only value; modern browsers
   expect cursor:pointer — confirm IE-only support is still intended. */
.colorcell
{
	width:22px;
	height:11px;
	cursor:hand;
}
.colordiv
{
	border:solid 1px #808080;
	width:22px;
	height:11px;
	font-size:1px;
}
</style>
<script>
// Render one color component (0-255) as a two-digit hex string.
function DoubleHex(v)
{
	if(v<16)return "0"+v.toString(16);
	return v.toString(16);
}
// r,g,b are palette indices 0-5; each is scaled by 51 to the web-safe
// component values (00,33,66,99,CC,FF) and joined into "#RRGGBB".
function ToHexString(r,g,b)
{
	return ("#"+DoubleHex(r*51)+DoubleHex(g*51)+DoubleHex(b*51)).toUpperCase();
}
// Map a (z,x,y) cell of the 6x6x6 cube onto a hex color, mirroring some
// planes so visually-similar colors sit next to each other in the grid.
function MakeHex(z,x,y)
{
	//hor->ver: reorder the z planes (interleave the two grid columns)
	var l=z%2
	var t=(z-l)/2
	z=l*3+t
	//left column , l/r mirror
	if(z<3)x=5-x;
	//middle row , t/b mirror
	if(z==1||z==4)y=5-y;
	return ToHexString(5-y,5-x,5-z);
}
// Fill a 12x18 table (216 cells) with the web-safe palette.
var colors=new Array(216);
for(var z=0;z<6;z++)
{
	for(var x=0;x<6;x++)
	{
		for(var y=0;y<6;y++)
		{
			var hex=MakeHex(z,x,y)
			// Two 6-wide blocks side by side: z selects the block, x/y the cell.
			var xx=(z%2)*6+x;
			var yy=Math.floor(z/2)*6+y;
			colors[yy*12+xx]=hex;
		}
	}
}
// Build the palette table markup; written into the page via document.write below.
var arr=[];
for(var i=0;i<colors.length;i++)
{
	if(i%12==0)arr.push("<tr>");
	arr.push("<td class='colorcell'><div class='colordiv' style='background-color:")
	arr.push(colors[i]);
	arr.push("' cvalue='");
	arr.push(colors[i]);
	arr.push("' title='")
	arr.push(colors[i]);
	arr.push("'>&nbsp;</div></td>");
	if(i%12==11)arr.push("</tr>");
}
</script>
</head>
<body>
<div id="ajaxdiv">
<div class="tab-pane-control tab-pane" id="tabPane1">
<!-- Tab strip: this page (WebPalette) is selected; the other two tabs reload
     the dialog with the named-color / custom-color pickers. -->
<div class="tab-row">
<h2 class="tab selected">
<a tabindex="-1" href='colorpicker.php?Theme=<?php echo $Theme; ?>&<?php echo $_SERVER["QUERY_STRING"]; ?>'>
<span style="white-space:nowrap;">
<?php echo GetString("WebPalette") ; ?>
</span>
</a>
</h2>
<h2 class="tab">
<a tabindex="-1" href='colorpicker_basic.php?Theme=<?php echo $Theme; ?>&<?php echo $_SERVER["QUERY_STRING"]; ?>'>
<span style="white-space:nowrap;">
<?php echo GetString("NamedColors") ; ?>
</span>
</a>
</h2>
<h2 class="tab">
<a tabindex="-1" href='colorpicker_more.php?Theme=<?php echo $Theme; ?>&<?php echo $_SERVER["QUERY_STRING"]; ?>'>
<span style="white-space:nowrap;">
<?php echo GetString("CustomColor") ; ?>
</span>
</a>
</h2>
</div>
<div class="tab-page">
<table cellSpacing='2' cellPadding="1" align="center">
<script>
document.write(arr.join(""));
</script>
<tr>
<td colspan="12" height="12"><p align="left"></p>
</td>
</tr>
<tr>
<td colspan="12" valign="middle" height="24">
<span style="height:24px;width:50px;vertical-align:middle;"><?php echo GetString("Color") ; ?>: </span>&nbsp;
<input type="text" id="divpreview" size="7" maxlength="7" style="width:180px;height:24px;border:#a0a0a0 1px solid; Padding:4;"/>
</td>
</tr>
</table>
</div>
</div>
<!-- do_insert/do_Close are provided by Dialog_ColorPicker.js -->
<div id="container-bottom">
<input type="button" id="buttonok" value="<?php echo GetString("OK") ; ?>" class="formbutton" style="width:70px" onclick="do_insert();" />
&nbsp;&nbsp;&nbsp;&nbsp;
<input type="button" id="buttoncancel" value="<?php echo GetString("Cancel") ; ?>" class="formbutton" style="width:70px" onclick="do_Close();" />
</div>
</div>
</body>
</html>
apache-2.0
yaolei/Samoyed
node_modules/material-ui/Card/CardExpandable.js
3257
'use strict';

// Compiled (Babel, CommonJS) output of material-ui's Card/CardExpandable
// component — generated code shipped in node_modules; do not hand-edit.
// Renders an absolutely-positioned IconButton that toggles between an
// open and a close arrow icon based on the `expanded` prop.

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _getPrototypeOf = require('babel-runtime/core-js/object/get-prototype-of');

var _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf);

var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');

var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);

var _createClass2 = require('babel-runtime/helpers/createClass');

var _createClass3 = _interopRequireDefault(_createClass2);

var _possibleConstructorReturn2 = require('babel-runtime/helpers/possibleConstructorReturn');

var _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2);

var _inherits2 = require('babel-runtime/helpers/inherits');

var _inherits3 = _interopRequireDefault(_inherits2);

var _simpleAssign = require('simple-assign');

var _simpleAssign2 = _interopRequireDefault(_simpleAssign);

var _react = require('react');

var _react2 = _interopRequireDefault(_react);

var _propTypes = require('prop-types');

var _propTypes2 = _interopRequireDefault(_propTypes);

var _keyboardArrowUp = require('../svg-icons/hardware/keyboard-arrow-up');

var _keyboardArrowUp2 = _interopRequireDefault(_keyboardArrowUp);

var _keyboardArrowDown = require('../svg-icons/hardware/keyboard-arrow-down');

var _keyboardArrowDown2 = _interopRequireDefault(_keyboardArrowDown);

var _IconButton = require('../IconButton');

var _IconButton2 = _interopRequireDefault(_IconButton);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Styles for the toggle button: pinned to the right edge, vertically centered.
// NOTE(review): declared with no parameters but called below as
// getStyles(this.props, this.context); the arguments are ignored.
function getStyles() {
  return {
    root: {
      top: 0,
      bottom: 0,
      right: 4,
      margin: 'auto',
      position: 'absolute'
    }
  };
}

var CardExpandable = function (_Component) {
  (0, _inherits3.default)(CardExpandable, _Component);

  function CardExpandable() {
    (0, _classCallCheck3.default)(this, CardExpandable);
    return (0, _possibleConstructorReturn3.default)(this, (CardExpandable.__proto__ || (0, _getPrototypeOf2.default)(CardExpandable)).apply(this, arguments));
  }

  (0, _createClass3.default)(CardExpandable, [{
    key: 'render',
    value: function render() {
      var styles = getStyles(this.props, this.context);

      // IconButton shows openIcon while expanded, closeIcon otherwise;
      // onExpanding is forwarded as the click handler.
      return _react2.default.createElement(
        _IconButton2.default,
        {
          style: (0, _simpleAssign2.default)(styles.root, this.props.style),
          onClick: this.props.onExpanding,
          iconStyle: this.props.iconStyle
        },
        this.props.expanded ? this.props.openIcon : this.props.closeIcon
      );
    }
  }]);
  return CardExpandable;
}(_react.Component);

CardExpandable.contextTypes = {
  muiTheme: _propTypes2.default.object.isRequired
};

// Default arrow icons; overridable via the openIcon/closeIcon props.
CardExpandable.defaultProps = {
  closeIcon: _react2.default.createElement(_keyboardArrowDown2.default, null),
  openIcon: _react2.default.createElement(_keyboardArrowUp2.default, null)
};

// propTypes are stripped in production builds.
CardExpandable.propTypes = process.env.NODE_ENV !== "production" ? {
  closeIcon: _propTypes2.default.node,
  expanded: _propTypes2.default.bool,
  iconStyle: _propTypes2.default.object,
  onExpanding: _propTypes2.default.func.isRequired,
  openIcon: _propTypes2.default.node,
  style: _propTypes2.default.object
} : {};

exports.default = CardExpandable;
apache-2.0
payten/nyu-sakai-10.4
samigo/samigo-app/src/java/org/sakaiproject/tool/assessment/jsf/renderer/DatePickerRenderer.java
6076
/**********************************************************************************
 * $URL$
 * $Id$
 ***********************************************************************************
 *
 * Copyright (c) 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.tool.assessment.jsf.renderer;

import java.io.IOException;
import java.util.Map;
import javax.faces.component.UIComponent;
import javax.faces.component.UIInput;
import javax.faces.component.UIViewRoot;
import javax.faces.component.ValueHolder;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import javax.faces.render.Renderer;
import org.sakaiproject.util.ResourceLoader;
import org.sakaiproject.tool.assessment.jsf.renderer.util.RendererUtil;
import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil;

/**
 * <p>Description: </p>
 * <p>Render the custom date picker control: a text input plus a calendar icon
 * that pops up a JavaScript calendar2 widget writing back into the input.</p>
 * <p>Copyright: Copyright (c) 2004</p>
 * <p>Organization: Sakai Project</p>
 * @author Ed Smiley
 * @version $id: $
 */
public class DatePickerRenderer extends Renderer
{
  // icon height and width
  private static final String HEIGHT = "16";
  private static final String WIDTH = "16";
  private static final String CURSORSTYLE = "cursor:pointer;";
  //moved to properties
  //private static final String CLICKALT = "Click Here to Pick Date";

  /** This renderer only handles UIInput components. */
  public boolean supportsComponentType(UIComponent component)
  {
    return (component instanceof UIInput);
  }

  /**
   * decode the value: copy the raw request parameter keyed by the client id
   * into the component's submitted value.
   * @param context
   * @param component
   */
  public void decode(FacesContext context, UIComponent component)
  {
    // we haven't added these attributes--yet--defensive programming...
    if(RendererUtil.isDisabledOrReadonly(component))
    {
      return;
    }

    String clientId = component.getClientId(context);
    Map requestParameterMap = context.getExternalContext()
      .getRequestParameterMap();
    String newValue = (String) requestParameterMap.get(clientId );
    UIInput comp = (UIInput) component;
    comp.setSubmittedValue(newValue);
  }

  // All markup is emitted in encodeEnd; begin/children intentionally no-op.
  public void encodeBegin(FacesContext context,
                          UIComponent component) throws IOException
  {
    ;
  }

  public void encodeChildren(FacesContext context,
                             UIComponent component) throws IOException
  {
    ;
  }

  /**
   * <p>Faces render output method .</p>
   * <p>Method Generator: org.sakaiproject.tool.assessment.devtoolsRenderMaker</p>
   *
   * <p>Writes the text input, then an inline script hooking a uniquely named
   * calendar2 instance to it, then the clickable calendar icon.</p>
   *
   * @param context   <code>FacesContext</code> for the current request
   * @param component <code>UIComponent</code> being rendered
   *
   * @throws IOException if an input/output error occurs
   */
  public void encodeEnd(FacesContext context, UIComponent component) throws
    IOException
  {
    ResourceLoader rb= new ResourceLoader("org.sakaiproject.tool.assessment.bundle.AssessmentSettingsMessages");
    ResponseWriter writer = context.getResponseWriter();
    String contextPath = context.getExternalContext()
      .getRequestContextPath();
    String jsfId = (String) component.getAttributes().get("id");
    String id = jsfId;
    // Use the full client id unless the id is a JSF auto-generated one.
    if (component.getId() != null &&
        !component.getId().startsWith(UIViewRoot.UNIQUE_ID_PREFIX))
    {
      id = component.getClientId(context);
    }

    // Prefer the freshly submitted value; fall back to the component's value.
    Object value = null;
    if (component instanceof UIInput)
    {
      value = ( (UIInput) component).getSubmittedValue();
    }
    if (value == null && component instanceof ValueHolder)
    {
      value = ( (ValueHolder) component).getValue();
    }
    String valString = "";
    if (value != null)
    {
      valString = value.toString();
    }

    String type = "text";
    String size = (String) component.getAttributes().get("size");
    if (size == null)
    {
      size = "20";
      // script creates unique calendar object with input object
    }

    // Choose day-first vs month-first calendar scripts from the localized format.
    String display_dateFormat= ContextUtil.getLocalizedString("org.sakaiproject.tool.assessment.bundle.GeneralMessages","output_data_picker_w_sec");
    String genDate = null;
    String prsDate = null;
    if (display_dateFormat.toLowerCase().startsWith("dd")) {
      genDate = "cal_gen_date2_dm";
      prsDate = "cal_prs_date2_dm";
    }
    else {
      genDate = "cal_gen_date2_md";
      prsDate = "cal_prs_date2_md";
    }

    // Unique JS variable name so multiple pickers on one page don't collide.
    String calRand = "cal" + ("" + Math.random()).substring(2);
    String calScript =
      "var " + calRand + " = new calendar2(" +
      "document.getElementById('" + id + "'), " + genDate + ", " + prsDate + ");" +
      "" + calRand + ".year_scroll = true;" +
      "" + calRand + ".time_comp = true;";

    // NOTE(review): valString is written into an HTML attribute without
    // escaping; a submitted value containing a double quote would break the
    // markup (potential injection) — confirm upstream sanitization.
    writer.write("<input type=\"" + type + "\" name=\"" + id + "\" id=\"" + id +
                 "\" size=\"" + size + "\" value=");
    writer.write("\"" + valString + "\">&#160;<img \n onclick=");
    writer.write("\"javascript:" + calScript + calRand + ".popup('','" +
                 contextPath + "/html/');\"\n");
    // "/jsf/widget/datepicker/');\"\n");
    writer.write(" width=\"" + WIDTH + "\"\n");
    writer.write(" height=\"" + HEIGHT + "\"\n");
    writer.write(" style=\"" + CURSORSTYLE + "\" ");
    writer.write(" src=\"" + contextPath + "/images/calendar/cal.gif\"\n");
    writer.write(" border=\"0\"\n");
    writer.write(" id=\"_datePickerPop_" + id + "\"");
    //writer.write(" alt=\"" + CLICKALT + "\"/>&#160;&#160;\n");
    writer.write(" alt=\"" + rb.getString("dp_CLICKALT") + "\"/>&#160;&#160;\n");
  }
}
apache-2.0
tkarling/angular
modules/benchmarks/src/compiler/compiler_benchmark.ts
7166
import {BrowserDomAdapter} from 'angular2/src/dom/browser_adapter'; import {PromiseWrapper} from 'angular2/src/facade/async'; import {List, ListWrapper, Map, MapWrapper} from 'angular2/src/facade/collection'; import {DateWrapper, Type, print} from 'angular2/src/facade/lang'; import { Parser, Lexer, DynamicChangeDetection } from 'angular2/src/change_detection/change_detection'; import {Compiler, CompilerCache} from 'angular2/src/core/compiler/compiler'; import {DirectiveResolver} from 'angular2/src/core/compiler/directive_resolver'; import {PipeResolver} from 'angular2/src/core/compiler/pipe_resolver'; import * as viewModule from 'angular2/src/core/annotations_impl/view'; import {Component, Directive, View} from 'angular2/angular2'; import {ViewResolver} from 'angular2/src/core/compiler/view_resolver'; import {UrlResolver} from 'angular2/src/services/url_resolver'; import {AppRootUrl} from 'angular2/src/services/app_root_url'; import {ComponentUrlMapper} from 'angular2/src/core/compiler/component_url_mapper'; import {reflector} from 'angular2/src/reflection/reflection'; import {ReflectionCapabilities} from 'angular2/src/reflection/reflection_capabilities'; import {getIntParameter, bindAction} from 'angular2/src/test_lib/benchmark_util'; import {ProtoViewFactory} from 'angular2/src/core/compiler/proto_view_factory'; import { ViewLoader, DefaultDomCompiler, SharedStylesHost, TemplateCloner } from 'angular2/src/render/render'; import {DomElementSchemaRegistry} from 'angular2/src/render/dom/schema/dom_element_schema_registry'; export function main() { BrowserDomAdapter.makeCurrent(); var count = getIntParameter('elements'); reflector.reflectionCapabilities = new ReflectionCapabilities(); var reader = new DirectiveResolver(); var pipeResolver = new PipeResolver(); var cache = new CompilerCache(); var viewResolver = new MultipleViewResolver( count, [BenchmarkComponentNoBindings, BenchmarkComponentWithBindings]); var urlResolver = new UrlResolver(); var appRootUrl = new 
AppRootUrl(""); var renderCompiler = new DefaultDomCompiler( new DomElementSchemaRegistry(), new TemplateCloner(-1), new Parser(new Lexer()), new ViewLoader(null, null, null), new SharedStylesHost(), 'a'); var compiler = new Compiler(reader, pipeResolver, [], cache, viewResolver, new ComponentUrlMapper(), urlResolver, renderCompiler, new ProtoViewFactory(new DynamicChangeDetection()), appRootUrl); function measureWrapper(func, desc) { return function() { var begin = DateWrapper.now(); print(`[${desc}] Begin...`); var onSuccess = function(_) { var elapsedMs = DateWrapper.toMillis(DateWrapper.now()) - DateWrapper.toMillis(begin); print(`[${desc}] ...done, took ${elapsedMs} ms`); }; PromiseWrapper.then(func(), onSuccess, null); }; } function compileNoBindings() { cache.clear(); return compiler.compileInHost(BenchmarkComponentNoBindings); } function compileWithBindings() { cache.clear(); return compiler.compileInHost(BenchmarkComponentWithBindings); } bindAction('#compileNoBindings', measureWrapper(compileNoBindings, 'No Bindings')); bindAction('#compileWithBindings', measureWrapper(compileWithBindings, 'With Bindings')); } @Directive({selector: '[dir0]', properties: ['prop: attr0']}) class Dir0 { } @Directive({selector: '[dir1]', properties: ['prop: attr1']}) class Dir1 { constructor(dir0: Dir0) {} } @Directive({selector: '[dir2]', properties: ['prop: attr2']}) class Dir2 { constructor(dir1: Dir1) {} } @Directive({selector: '[dir3]', properties: ['prop: attr3']}) class Dir3 { constructor(dir2: Dir2) {} } @Directive({selector: '[dir4]', properties: ['prop: attr4']}) class Dir4 { constructor(dir3: Dir3) {} } class MultipleViewResolver extends ViewResolver { _multiple: number; _cache: Map<any, any>; constructor(multiple: number, components: List<Type>) { super(); this._multiple = multiple; this._cache = new Map(); ListWrapper.forEach(components, (c) => this._warmUp(c)); } _warmUp(component: Type) { var view = super.resolve(component); var multiplier = 
ListWrapper.createFixedSize(this._multiple); for (var i = 0; i < this._multiple; ++i) { multiplier[i] = view.template; } this._cache.set(component, ListWrapper.join(multiplier, '')); } resolve(component: Type): viewModule.View { var view = super.resolve(component); var myView = new viewModule.View( {template:<string>this._cache.get(component), directives: view.directives}); return myView; } } @Component({selector: 'cmp-nobind'}) @View({ directives: [Dir0, Dir1, Dir2, Dir3, Dir4], template: ` <div class="class0 class1 class2 class3 class4 " nodir0="" attr0="value0" nodir1="" attr1="value1" nodir2="" attr2="value2" nodir3="" attr3="value3" nodir4="" attr4="value4"> <div class="class0 class1 class2 class3 class4 " nodir0="" attr0="value0" nodir1="" attr1="value1" nodir2="" attr2="value2" nodir3="" attr3="value3" nodir4="" attr4="value4"> <div class="class0 class1 class2 class3 class4 " nodir0="" attr0="value0" nodir1="" attr1="value1" nodir2="" attr2="value2" nodir3="" attr3="value3" nodir4="" attr4="value4"> <div class="class0 class1 class2 class3 class4 " nodir0="" attr0="value0" nodir1="" attr1="value1" nodir2="" attr2="value2" nodir3="" attr3="value3" nodir4="" attr4="value4"> <div class="class0 class1 class2 class3 class4 " nodir0="" attr0="value0" nodir1="" attr1="value1" nodir2="" attr2="value2" nodir3="" attr3="value3" nodir4="" attr4="value4"> </div> </div> </div> </div> </div>` }) class BenchmarkComponentNoBindings { } @Component({selector: 'cmp-withbind'}) @View({ directives: [Dir0, Dir1, Dir2, Dir3, Dir4], template: ` <div class="class0 class1 class2 class3 class4 " dir0="" [attr0]="value0" dir1="" [attr1]="value1" dir2="" [attr2]="value2" dir3="" [attr3]="value3" dir4="" [attr4]="value4"> {{inter0}}{{inter1}}{{inter2}}{{inter3}}{{inter4}} <div class="class0 class1 class2 class3 class4 " dir0="" [attr0]="value0" dir1="" [attr1]="value1" dir2="" [attr2]="value2" dir3="" [attr3]="value3" dir4="" [attr4]="value4"> 
{{inter0}}{{inter1}}{{inter2}}{{inter3}}{{inter4}} <div class="class0 class1 class2 class3 class4 " dir0="" [attr0]="value0" dir1="" [attr1]="value1" dir2="" [attr2]="value2" dir3="" [attr3]="value3" dir4="" [attr4]="value4"> {{inter0}}{{inter1}}{{inter2}}{{inter3}}{{inter4}} <div class="class0 class1 class2 class3 class4 " dir0="" [attr0]="value0" dir1="" [attr1]="value1" dir2="" [attr2]="value2" dir3="" [attr3]="value3" dir4="" [attr4]="value4"> {{inter0}}{{inter1}}{{inter2}}{{inter3}}{{inter4}} <div class="class0 class1 class2 class3 class4 " dir0="" [attr0]="value0" dir1="" [attr1]="value1" dir2="" [attr2]="value2" dir3="" [attr3]="value3" dir4="" [attr4]="value4"> {{inter0}}{{inter1}}{{inter2}}{{inter3}}{{inter4}} </div> </div> </div> </div> </div>` }) class BenchmarkComponentWithBindings { }
apache-2.0
yurydelendik/shumway
src/flash/display/IBitmapDrawable.ts
891
/** * Copyright 2014 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Class: IBitmapDrawable module Shumway.AVMX.AS.flash.display { import notImplemented = Shumway.Debug.notImplemented; import axCoerceString = Shumway.AVMX.axCoerceString; export interface IBitmapDrawable { // JS -> AS Bindings // AS -> JS Bindings } }
apache-2.0
richardjgowers/csg
include/votca/csg/cgmoleculedef.h
2482
/* * Copyright 2009-2011 The VOTCA Development Team (http://www.votca.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ #ifndef _cgmoleculedef_H #define _cgmoleculedef_H #include <string> #include <vector> #include <map> #include <string> #include <votca/tools/property.h> #include "map.h" #include <votca/tools/types.h> #include "exclusionlist.h" #include "molecule.h" namespace votca { namespace csg { using namespace votca::tools; using namespace std; /** \brief definition of a coarse grained molecule This class is to define a coarse grained molecule, which includes the topology, mapping, ... \todo clean up this class, do the bonded interactions right!!!! \todo check for consistency of xml file, seperate xml parser and class!! 
*/ class CGMoleculeDef { public: CGMoleculeDef() {} ~CGMoleculeDef(); Molecule *CreateMolecule(Topology & top); Map *CreateMap(Molecule &in, Molecule &out); void Load(string filename); const string &getName() { return _name; } const string &getIdent() { return _ident; } private: Property _options; struct beaddef_t { string _name; string _type; byte_t _symmetry; string _mapping; vector<string> _subbeads; Property *_options; }; // name of the coarse grained molecule string _name; // name of the molecule to coarse grain string _ident; // beads of the cg molecule vector<beaddef_t *> _beads; map<string, beaddef_t *> _beads_by_name; // mapping schemes map<string, Property *> _maps; list<Property *> _bonded; void ParseTopology(Property &options); void ParseBeads(Property &options); void ParseBonded(Property &options); void ParseMapping(Property &options); beaddef_t *getBeadByName(const string &name); Property *getMapByName(const string &name); }; }} #endif /* _cgmoleculedef_H */
apache-2.0
galak/zephyr
lib/libc/minimal/source/stdlib/malloc.c
3247
/* * Copyright (c) 2018 Intel Corporation * * SPDX-License-Identifier: Apache-2.0 */ #include <stdlib.h> #include <zephyr.h> #include <init.h> #include <errno.h> #include <sys/math_extras.h> #include <string.h> #include <app_memory/app_memdomain.h> #include <sys/mutex.h> #include <sys/sys_heap.h> #include <zephyr/types.h> #define LOG_LEVEL CONFIG_KERNEL_LOG_LEVEL #include <logging/log.h> LOG_MODULE_DECLARE(os, CONFIG_KERNEL_LOG_LEVEL); #ifdef CONFIG_MINIMAL_LIBC_MALLOC #if (CONFIG_MINIMAL_LIBC_MALLOC_ARENA_SIZE > 0) #ifdef CONFIG_USERSPACE K_APPMEM_PARTITION_DEFINE(z_malloc_partition); #define POOL_SECTION K_APP_DMEM_SECTION(z_malloc_partition) #else #define POOL_SECTION .bss #endif /* CONFIG_USERSPACE */ #define HEAP_BYTES CONFIG_MINIMAL_LIBC_MALLOC_ARENA_SIZE Z_GENERIC_SECTION(POOL_SECTION) static struct sys_heap z_malloc_heap; Z_GENERIC_SECTION(POOL_SECTION) struct sys_mutex z_malloc_heap_mutex; Z_GENERIC_SECTION(POOL_SECTION) static char z_malloc_heap_mem[HEAP_BYTES]; void *malloc(size_t size) { int lock_ret; lock_ret = sys_mutex_lock(&z_malloc_heap_mutex, K_FOREVER); __ASSERT_NO_MSG(lock_ret == 0); void *ret = sys_heap_aligned_alloc(&z_malloc_heap, __alignof__(z_max_align_t), size); if (ret == NULL && size != 0) { errno = ENOMEM; } (void) sys_mutex_unlock(&z_malloc_heap_mutex); return ret; } static int malloc_prepare(const struct device *unused) { ARG_UNUSED(unused); sys_heap_init(&z_malloc_heap, z_malloc_heap_mem, HEAP_BYTES); sys_mutex_init(&z_malloc_heap_mutex); return 0; } void *realloc(void *ptr, size_t requested_size) { int lock_ret; lock_ret = sys_mutex_lock(&z_malloc_heap_mutex, K_FOREVER); __ASSERT_NO_MSG(lock_ret == 0); void *ret = sys_heap_aligned_realloc(&z_malloc_heap, ptr, __alignof__(z_max_align_t), requested_size); if (ret == NULL && requested_size != 0) { errno = ENOMEM; } (void) sys_mutex_unlock(&z_malloc_heap_mutex); return ret; } void free(void *ptr) { int lock_ret; lock_ret = sys_mutex_lock(&z_malloc_heap_mutex, K_FOREVER); 
__ASSERT_NO_MSG(lock_ret == 0); sys_heap_free(&z_malloc_heap, ptr); (void) sys_mutex_unlock(&z_malloc_heap_mutex); } SYS_INIT(malloc_prepare, APPLICATION, CONFIG_KERNEL_INIT_PRIORITY_DEFAULT); #else /* No malloc arena */ void *malloc(size_t size) { ARG_UNUSED(size); LOG_ERR("CONFIG_MINIMAL_LIBC_MALLOC_ARENA_SIZE is 0"); errno = ENOMEM; return NULL; } void free(void *ptr) { ARG_UNUSED(ptr); } void *realloc(void *ptr, size_t size) { ARG_UNUSED(ptr); return malloc(size); } #endif #endif /* CONFIG_MINIMAL_LIBC_MALLOC */ #ifdef CONFIG_MINIMAL_LIBC_CALLOC void *calloc(size_t nmemb, size_t size) { void *ret; if (size_mul_overflow(nmemb, size, &size)) { errno = ENOMEM; return NULL; } ret = malloc(size); if (ret != NULL) { (void)memset(ret, 0, size); } return ret; } #endif /* CONFIG_MINIMAL_LIBC_CALLOC */ #ifdef CONFIG_MINIMAL_LIBC_REALLOCARRAY void *reallocarray(void *ptr, size_t nmemb, size_t size) { #if (CONFIG_MINIMAL_LIBC_MALLOC_ARENA_SIZE > 0) if (size_mul_overflow(nmemb, size, &size)) { errno = ENOMEM; return NULL; } return realloc(ptr, size); #else return NULL; #endif } #endif /* CONFIG_MINIMAL_LIBC_REALLOCARRAY */
apache-2.0
cstamas/maven
maven-compat/src/test/java/org/apache/maven/project/inheritance/t08/ProjectInheritanceTest.java
3173
package org.apache.maven.project.inheritance.t08; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.util.Iterator; import java.util.Set; import org.apache.maven.artifact.Artifact; import org.apache.maven.project.MavenProject; import org.apache.maven.project.inheritance.AbstractProjectInheritanceTestCase; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; /** * A test which demonstrates maven's dependency management * * @author <a href="[email protected]">Ralph Goers</a> */ public class ProjectInheritanceTest extends AbstractProjectInheritanceTestCase { // ---------------------------------------------------------------------- // // p1 inherits from p0 // p0 inherits from super model // // or we can show it graphically as: // // p1 ---> p0 --> super model // // ---------------------------------------------------------------------- @Test public void testDependencyManagement() throws Exception { File localRepo = getLocalRepositoryPath(); File pom0 = new File( localRepo, "p0/pom.xml" ); File pom0Basedir = pom0.getParentFile(); File pom1 = new File( 
pom0Basedir, "p1/pom.xml" ); // load everything... MavenProject project0 = getProjectWithDependencies( pom0 ); MavenProject project1 = getProjectWithDependencies( pom1 ); assertEquals( pom0Basedir, project1.getParent().getBasedir() ); System.out.println( "Project " + project1.getId() + " " + project1 ); Set set = project1.getArtifacts(); assertNotNull( set, "No artifacts" ); assertTrue( set.size() > 0, "No Artifacts" ); Iterator iter = set.iterator(); assertTrue( set.size() == 4, "Set size should be 4, is " + set.size() ); while ( iter.hasNext() ) { Artifact artifact = (Artifact) iter.next(); System.out.println( "Artifact: " + artifact.getDependencyConflictId() + " " + artifact.getVersion() + " Optional=" + ( artifact.isOptional() ? "true" : "false" ) ); assertTrue( artifact.getVersion().equals( "1.0" ), "Incorrect version for " + artifact.getDependencyConflictId() ); } } }
apache-2.0
adeelmahmood/lens
lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
20153
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.lens.cube.metadata; import java.lang.reflect.Constructor; import java.util.*; import java.util.Map.Entry; import org.apache.commons.lang.NotImplementedException; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.metadata.*; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.mapred.TextInputFormat; import com.google.common.collect.Maps; /** * Storage is Named Interface which would represent the underlying storage of the data. 
*/ public abstract class Storage extends AbstractCubeTable implements PartitionMetahook { private static final List<FieldSchema> COLUMNS = new ArrayList<FieldSchema>(); static { COLUMNS.add(new FieldSchema("dummy", "string", "dummy column")); } protected Storage(String name, Map<String, String> properties) { super(name, COLUMNS, properties, 0L); addProperties(); } public Storage(Table hiveTable) { super(hiveTable); } /** * Get the name prefix of the storage * * @return Name followed by storage separator */ public String getPrefix() { return getPrefix(getName()); } @Override public CubeTableType getTableType() { return CubeTableType.STORAGE; } @Override public Set<String> getStorages() { throw new NotImplementedException(); } @Override protected void addProperties() { super.addProperties(); getProperties().put(MetastoreUtil.getStorageClassKey(getName()), getClass().getCanonicalName()); } /** * Get the name prefix of the storage * * @param name Name of the storage * @return Name followed by storage separator */ public static String getPrefix(String name) { return name + StorageConstants.STORGAE_SEPARATOR; } public static final class LatestInfo { Map<String, LatestPartColumnInfo> latestParts = new HashMap<String, LatestPartColumnInfo>(); Partition part = null; void addLatestPartInfo(String partCol, LatestPartColumnInfo partInfo) { latestParts.put(partCol, partInfo); } void setPart(Partition part) { this.part = part; } } public static final class LatestPartColumnInfo extends HashMap<String, String> { public LatestPartColumnInfo(Map<String, String> partParams) { putAll(partParams); } public Map<String, String> getPartParams(Map<String, String> parentParams) { putAll(parentParams); return this; } } /** * Get the storage table descriptor for the given parent table. 
* * @param client The metastore client * @param parent Is either Fact or Dimension table * @param crtTbl Create table info * @return Table describing the storage table * @throws HiveException */ public Table getStorageTable(Hive client, Table parent, StorageTableDesc crtTbl) throws HiveException { String storageTableName = MetastoreUtil.getStorageTableName(parent.getTableName(), this.getPrefix()); Table tbl = client.getTable(storageTableName, false); if (tbl == null) { tbl = client.newTable(storageTableName); } tbl.getTTable().setSd(new StorageDescriptor(parent.getTTable().getSd())); if (crtTbl.getTblProps() != null) { tbl.getTTable().getParameters().putAll(crtTbl.getTblProps()); } if (crtTbl.getPartCols() != null) { tbl.setPartCols(crtTbl.getPartCols()); } if (crtTbl.getNumBuckets() != -1) { tbl.setNumBuckets(crtTbl.getNumBuckets()); } if (!StringUtils.isBlank(crtTbl.getStorageHandler())) { tbl.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE, crtTbl.getStorageHandler()); } HiveStorageHandler storageHandler = tbl.getStorageHandler(); if (crtTbl.getSerName() == null) { if (storageHandler == null || storageHandler.getSerDeClass() == null) { tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName()); } else { String serDeClassName = storageHandler.getSerDeClass().getName(); tbl.setSerializationLib(serDeClassName); } } else { // let's validate that the serde exists tbl.setSerializationLib(crtTbl.getSerName()); } if (crtTbl.getFieldDelim() != null) { tbl.setSerdeParam(serdeConstants.FIELD_DELIM, crtTbl.getFieldDelim()); tbl.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT, crtTbl.getFieldDelim()); } if (crtTbl.getFieldEscape() != null) { tbl.setSerdeParam(serdeConstants.ESCAPE_CHAR, crtTbl.getFieldEscape()); } if (crtTbl.getCollItemDelim() != null) { tbl.setSerdeParam(serdeConstants.COLLECTION_DELIM, crtTbl.getCollItemDelim()); } if (crtTbl.getMapKeyDelim() != null) { 
tbl.setSerdeParam(serdeConstants.MAPKEY_DELIM, crtTbl.getMapKeyDelim()); } if (crtTbl.getLineDelim() != null) { tbl.setSerdeParam(serdeConstants.LINE_DELIM, crtTbl.getLineDelim()); } if (crtTbl.getSerdeProps() != null) { for (Entry<String, String> m : crtTbl.getSerdeProps().entrySet()) { tbl.setSerdeParam(m.getKey(), m.getValue()); } } if (crtTbl.getBucketCols() != null) { tbl.setBucketCols(crtTbl.getBucketCols()); } if (crtTbl.getSortCols() != null) { tbl.setSortCols(crtTbl.getSortCols()); } if (crtTbl.getComment() != null) { tbl.setProperty("comment", crtTbl.getComment()); } if (crtTbl.getLocation() != null) { tbl.setDataLocation(new Path(crtTbl.getLocation())); } if (crtTbl.getSkewedColNames() != null) { tbl.setSkewedColNames(crtTbl.getSkewedColNames()); } if (crtTbl.getSkewedColValues() != null) { tbl.setSkewedColValues(crtTbl.getSkewedColValues()); } tbl.setStoredAsSubDirectories(crtTbl.isStoredAsSubDirectories()); if (crtTbl.getInputFormat() != null) { tbl.setInputFormatClass(crtTbl.getInputFormat()); } else { tbl.setInputFormatClass(TextInputFormat.class.getName()); } if (crtTbl.getOutputFormat() != null) { tbl.setOutputFormatClass(crtTbl.getOutputFormat()); } else { tbl.setOutputFormatClass(IgnoreKeyTextOutputFormat.class.getName()); } tbl.getTTable().getSd().setInputFormat(tbl.getInputFormatClass().getName()); tbl.getTTable().getSd().setOutputFormat(tbl.getOutputFormatClass().getName()); if (crtTbl.isExternal()) { tbl.setProperty("EXTERNAL", "TRUE"); tbl.setTableType(TableType.EXTERNAL_TABLE); } return tbl; } /** * Add single partition to storage. Just calls #addPartitions. 
* @param client * @param addPartitionDesc * @param latestInfo * @throws HiveException */ public List<Partition> addPartition(Hive client, StoragePartitionDesc addPartitionDesc, LatestInfo latestInfo) throws HiveException { Map<Map<String, String>, LatestInfo> latestInfos = Maps.newHashMap(); latestInfos.put(addPartitionDesc.getNonTimePartSpec(), latestInfo); return addPartitions(client, addPartitionDesc.getCubeTableName(), addPartitionDesc.getUpdatePeriod(), Collections.singletonList(addPartitionDesc), latestInfos); } /** * Add given partitions in the underlying hive table and update latest partition links * * @param client hive client instance * @param factOrDimTable fact or dim name * @param updatePeriod update period of partitions. * @param storagePartitionDescs all partitions to be added * @param latestInfos new latest info. atleast one partition for the latest value exists for each part * column * @throws HiveException */ public List<Partition> addPartitions(Hive client, String factOrDimTable, UpdatePeriod updatePeriod, List<StoragePartitionDesc> storagePartitionDescs, Map<Map<String, String>, LatestInfo> latestInfos) throws HiveException { preAddPartitions(storagePartitionDescs); Map<Map<String, String>, Map<String, Integer>> latestPartIndexForPartCols = Maps.newHashMap(); boolean success = false; try { String tableName = MetastoreUtil.getStorageTableName(factOrDimTable, this.getPrefix()); String dbName = SessionState.get().getCurrentDatabase(); AddPartitionDesc addParts = new AddPartitionDesc(dbName, tableName, true); Table storageTbl = client.getTable(dbName, tableName); for (StoragePartitionDesc addPartitionDesc : storagePartitionDescs) { String location = null; if (addPartitionDesc.getLocation() != null) { Path partLocation = new Path(addPartitionDesc.getLocation()); if (partLocation.isAbsolute()) { location = addPartitionDesc.getLocation(); } else { location = new Path(storageTbl.getPath(), partLocation).toString(); } } Map<String, String> partParams = 
addPartitionDesc.getPartParams(); if (partParams == null) { partParams = new HashMap<String, String>(); } partParams.put(MetastoreConstants.PARTITION_UPDATE_PERIOD, addPartitionDesc.getUpdatePeriod().name()); addParts.addPartition(addPartitionDesc.getStoragePartSpec(), location); int curIndex = addParts.getPartitionCount() - 1; addParts.getPartition(curIndex).setPartParams(partParams); addParts.getPartition(curIndex).setInputFormat(addPartitionDesc.getInputFormat()); addParts.getPartition(curIndex).setOutputFormat(addPartitionDesc.getOutputFormat()); addParts.getPartition(curIndex).setNumBuckets(addPartitionDesc.getNumBuckets()); addParts.getPartition(curIndex).setCols(addPartitionDesc.getCols()); addParts.getPartition(curIndex).setSerializationLib(addPartitionDesc.getSerializationLib()); addParts.getPartition(curIndex).setSerdeParams(addPartitionDesc.getSerdeParams()); addParts.getPartition(curIndex).setBucketCols(addPartitionDesc.getBucketCols()); addParts.getPartition(curIndex).setSortCols(addPartitionDesc.getSortCols()); if (latestInfos != null && latestInfos.get(addPartitionDesc.getNonTimePartSpec()) != null) { for (Map.Entry<String, LatestPartColumnInfo> entry : latestInfos .get(addPartitionDesc.getNonTimePartSpec()).latestParts.entrySet()) { if (addPartitionDesc.getTimePartSpec().containsKey(entry.getKey()) && entry.getValue().get(MetastoreUtil.getLatestPartTimestampKey(entry.getKey())).equals( updatePeriod.format().format(addPartitionDesc.getTimePartSpec().get(entry.getKey())))) { if (latestPartIndexForPartCols.get(addPartitionDesc.getNonTimePartSpec()) == null) { latestPartIndexForPartCols.put(addPartitionDesc.getNonTimePartSpec(), Maps.<String, Integer>newHashMap()); } latestPartIndexForPartCols.get(addPartitionDesc.getNonTimePartSpec()).put(entry.getKey(), curIndex); } } } } if (latestInfos != null) { for (Map.Entry<Map<String, String>, LatestInfo> entry1 : latestInfos.entrySet()) { Map<String, String> nonTimeParts = entry1.getKey(); LatestInfo 
latestInfo = entry1.getValue(); for (Map.Entry<String, LatestPartColumnInfo> entry : latestInfo.latestParts.entrySet()) { // symlink this partition to latest List<Partition> latest; String latestPartCol = entry.getKey(); try { latest = client .getPartitionsByFilter(storageTbl, StorageConstants.getLatestPartFilter(latestPartCol, nonTimeParts)); } catch (Exception e) { throw new HiveException("Could not get latest partition", e); } if (!latest.isEmpty()) { client.dropPartition(storageTbl.getTableName(), latest.get(0).getValues(), false); } if (latestPartIndexForPartCols.get(nonTimeParts).containsKey(latestPartCol)) { AddPartitionDesc.OnePartitionDesc latestPartWithFullTimestamp = addParts.getPartition( latestPartIndexForPartCols.get(nonTimeParts).get(latestPartCol)); addParts.addPartition( StorageConstants.getLatestPartSpec(latestPartWithFullTimestamp.getPartSpec(), latestPartCol), latestPartWithFullTimestamp.getLocation()); int curIndex = addParts.getPartitionCount() - 1; addParts.getPartition(curIndex).setPartParams(entry.getValue().getPartParams( latestPartWithFullTimestamp.getPartParams())); addParts.getPartition(curIndex).setInputFormat(latestPartWithFullTimestamp.getInputFormat()); addParts.getPartition(curIndex).setOutputFormat(latestPartWithFullTimestamp.getOutputFormat()); addParts.getPartition(curIndex).setNumBuckets(latestPartWithFullTimestamp.getNumBuckets()); addParts.getPartition(curIndex).setCols(latestPartWithFullTimestamp.getCols()); addParts.getPartition(curIndex).setSerializationLib(latestPartWithFullTimestamp.getSerializationLib()); addParts.getPartition(curIndex).setSerdeParams(latestPartWithFullTimestamp.getSerdeParams()); addParts.getPartition(curIndex).setBucketCols(latestPartWithFullTimestamp.getBucketCols()); addParts.getPartition(curIndex).setSortCols(latestPartWithFullTimestamp.getSortCols()); } } } } List<Partition> partitionsAdded = client.createPartitions(addParts); success = true; return partitionsAdded; } finally { if (success) { 
commitAddPartitions(storagePartitionDescs); } else { rollbackAddPartitions(storagePartitionDescs); } } } /** * Update existing partition * @param client hive client instance * @param fact fact name * @param partition partition to be updated * @throws InvalidOperationException * @throws HiveException */ public void updatePartition(Hive client, String fact, Partition partition) throws InvalidOperationException, HiveException { client.alterPartition(MetastoreUtil.getFactOrDimtableStorageTableName(fact, getName()), partition); } /** * Update existing partitions * @param client hive client instance * @param fact fact name * @param partitions partitions to be updated * @throws InvalidOperationException * @throws HiveException */ public void updatePartitions(Hive client, String fact, List<Partition> partitions) throws InvalidOperationException, HiveException { boolean success = false; try { client.alterPartitions(MetastoreUtil.getFactOrDimtableStorageTableName(fact, getName()), partitions); success = true; } finally { if (success) { commitUpdatePartition(partitions); } else { rollbackUpdatePartition(partitions); } } } /** * Drop the partition in the underlying hive table and update latest partition link * * @param client The metastore client * @param storageTableName TableName * @param partVals Partition specification * @param updateLatestInfo The latest partition info if it needs update, null if latest should not be updated * @param nonTimePartSpec * @throws HiveException */ public void dropPartition(Hive client, String storageTableName, List<String> partVals, Map<String, LatestInfo> updateLatestInfo, Map<String, String> nonTimePartSpec) throws HiveException { preDropPartition(storageTableName, partVals); boolean success = false; try { client.dropPartition(storageTableName, partVals, false); String dbName = SessionState.get().getCurrentDatabase(); Table storageTbl = client.getTable(storageTableName); // update latest info if (updateLatestInfo != null) { for 
// NOTE(review): this chunk begins mid-method — the enclosing loop header is truncated
// before this point (the leading "for " is outside the visible range). The tokens of the
// truncated portion are preserved unchanged; only whitespace/formatting is normalized.
(Entry<String, LatestInfo> entry : updateLatestInfo.entrySet()) {
        String latestPartCol = entry.getKey();
        // symlink this partition to latest
        List<Partition> latestParts;
        try {
          latestParts = client.getPartitionsByFilter(storageTbl,
              StorageConstants.getLatestPartFilter(latestPartCol, nonTimePartSpec));
          MetastoreUtil.filterPartitionsByNonTimeParts(latestParts, nonTimePartSpec, latestPartCol);
        } catch (Exception e) {
          throw new HiveException("Could not get latest partition", e);
        }
        if (!latestParts.isEmpty()) {
          // At most one existing "latest" partition is expected for this column;
          // drop it before re-creating it pointing at the new latest data.
          assert latestParts.size() == 1;
          client.dropPartition(storageTbl.getTableName(), latestParts.get(0).getValues(), false);
        }
        LatestInfo latest = entry.getValue();
        if (latest != null && latest.part != null) {
          // Re-create the "latest" partition, copying storage descriptor details
          // (formats, serde, bucketing, sort columns) from the source partition.
          AddPartitionDesc latestPart = new AddPartitionDesc(dbName, storageTableName, true);
          latestPart.addPartition(
              StorageConstants.getLatestPartSpec(latest.part.getSpec(), latestPartCol),
              latest.part.getLocation());
          latestPart.getPartition(0).setPartParams(
              latest.latestParts.get(latestPartCol).getPartParams(latest.part.getParameters()));
          latestPart.getPartition(0).setInputFormat(
              latest.part.getInputFormatClass().getCanonicalName());
          latestPart.getPartition(0).setOutputFormat(
              latest.part.getOutputFormatClass().getCanonicalName());
          latestPart.getPartition(0).setNumBuckets(latest.part.getBucketCount());
          latestPart.getPartition(0).setCols(latest.part.getCols());
          latestPart.getPartition(0).setSerializationLib(
              latest.part.getTPartition().getSd().getSerdeInfo().getSerializationLib());
          latestPart.getPartition(0).setSerdeParams(
              latest.part.getTPartition().getSd().getSerdeInfo().getParameters());
          latestPart.getPartition(0).setBucketCols(latest.part.getBucketCols());
          latestPart.getPartition(0).setSortCols(latest.part.getSortCols());
          client.createPartitions(latestPart);
        }
      }
    }
    success = true;
  } finally {
    // Commit or roll back the partition drop depending on whether the metastore
    // updates above completed without throwing.
    if (success) {
      commitDropPartition(storageTableName, partVals);
    } else {
      rollbackDropPartition(storageTableName, partVals);
    }
  }
}

/**
 * Reflectively creates the {@link Storage} implementation registered for the given
 * storage table.
 *
 * <p>The implementation class name is read from the table's parameters under the key
 * produced by {@code MetastoreUtil.getStorageClassKey(tableName)}; the class must
 * expose a public constructor taking a single {@code Table} argument.
 *
 * @param tbl the storage table whose parameters carry the storage class name
 * @return a new {@code Storage} instance wrapping {@code tbl}
 * @throws HiveException if the class cannot be loaded, lacks the expected
 *     constructor, or instantiation fails (the underlying cause is preserved)
 */
static Storage createInstance(Table tbl) throws HiveException {
  String storageName = tbl.getTableName();
  String storageClassName = tbl.getParameters().get(MetastoreUtil.getStorageClassKey(storageName));
  try {
    Class<?> clazz = Class.forName(storageClassName);
    Constructor<?> constructor = clazz.getConstructor(Table.class);
    return (Storage) constructor.newInstance(tbl);
  } catch (Exception e) {
    // BUG FIX: added the missing space so the class name is not glued to the
    // message text (was: "Could not create storage classcom.example.Foo").
    throw new HiveException("Could not create storage class " + storageClassName, e);
  }
}
}
apache-2.0
LabAixBidouille/EmbeddedTeam
courses/examples/CMSIS/Device/ATMEL/samv71/include/instance/gmac.h
43746
/* ---------------------------------------------------------------------------- */ /* Atmel Microcontroller Software Support */ /* SAM Software Package License */ /* ---------------------------------------------------------------------------- */ /* Copyright (c) 2015, Atmel Corporation */ /* */ /* All rights reserved. */ /* */ /* Redistribution and use in source and binary forms, with or without */ /* modification, are permitted provided that the following condition is met: */ /* */ /* - Redistributions of source code must retain the above copyright notice, */ /* this list of conditions and the disclaimer below. */ /* */ /* Atmel's name may not be used to endorse or promote products derived from */ /* this software without specific prior written permission. */ /* */ /* DISCLAIMER: THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR */ /* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */ /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE */ /* DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR ANY DIRECT, INDIRECT, */ /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT */ /* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, */ /* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */ /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING */ /* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, */ /* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ /* ---------------------------------------------------------------------------- */ #ifndef _SAMV71_GMAC_INSTANCE_ #define _SAMV71_GMAC_INSTANCE_ /* ========== Register definition for GMAC peripheral ========== */ #if (defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) #define REG_GMAC_NCR (0x40050000U) /**< \brief (GMAC) Network Control Register */ #define REG_GMAC_NCFGR (0x40050004U) /**< \brief (GMAC) Network Configuration Register */ #define REG_GMAC_NSR (0x40050008U) /**< \brief (GMAC) Network Status Register */ #define REG_GMAC_UR (0x4005000CU) /**< \brief (GMAC) User Register */ #define REG_GMAC_DCFGR (0x40050010U) /**< \brief (GMAC) DMA Configuration Register */ #define REG_GMAC_TSR (0x40050014U) /**< \brief (GMAC) Transmit Status Register */ #define REG_GMAC_RBQB (0x40050018U) /**< \brief (GMAC) Receive Buffer Queue Base Address Register */ #define REG_GMAC_TBQB (0x4005001CU) /**< \brief (GMAC) Transmit Buffer Queue Base Address Register */ #define REG_GMAC_RSR (0x40050020U) /**< \brief (GMAC) Receive Status Register */ #define REG_GMAC_ISR (0x40050024U) /**< \brief (GMAC) Interrupt Status Register */ #define REG_GMAC_IER (0x40050028U) /**< \brief (GMAC) Interrupt Enable Register */ #define REG_GMAC_IDR (0x4005002CU) /**< \brief (GMAC) Interrupt Disable Register */ #define REG_GMAC_IMR (0x40050030U) /**< \brief (GMAC) Interrupt Mask Register */ #define REG_GMAC_MAN (0x40050034U) /**< \brief (GMAC) PHY Maintenance Register */ #define REG_GMAC_RPQ (0x40050038U) /**< \brief (GMAC) Received Pause Quantum Register */ #define REG_GMAC_TPQ (0x4005003CU) /**< \brief (GMAC) Transmit Pause Quantum Register */ #define REG_GMAC_TPSF (0x40050040U) /**< \brief (GMAC) TX Partial Store and Forward Register */ #define REG_GMAC_RPSF (0x40050044U) /**< \brief (GMAC) RX Partial Store and Forward Register */ #define REG_GMAC_RJFML (0x40050048U) /**< \brief (GMAC) RX Jumbo Frame Max Length Register */ #define REG_GMAC_HRB (0x40050080U) /**< \brief (GMAC) Hash Register 
Bottom */ #define REG_GMAC_HRT (0x40050084U) /**< \brief (GMAC) Hash Register Top */ #define REG_GMAC_SAB1 (0x40050088U) /**< \brief (GMAC) Specific Address 1 Bottom Register */ #define REG_GMAC_SAT1 (0x4005008CU) /**< \brief (GMAC) Specific Address 1 Top Register */ #define REG_GMAC_SAB2 (0x40050090U) /**< \brief (GMAC) Specific Address 2 Bottom Register */ #define REG_GMAC_SAT2 (0x40050094U) /**< \brief (GMAC) Specific Address 2 Top Register */ #define REG_GMAC_SAB3 (0x40050098U) /**< \brief (GMAC) Specific Address 3 Bottom Register */ #define REG_GMAC_SAT3 (0x4005009CU) /**< \brief (GMAC) Specific Address 3 Top Register */ #define REG_GMAC_SAB4 (0x400500A0U) /**< \brief (GMAC) Specific Address 4 Bottom Register */ #define REG_GMAC_SAT4 (0x400500A4U) /**< \brief (GMAC) Specific Address 4 Top Register */ #define REG_GMAC_TIDM1 (0x400500A8U) /**< \brief (GMAC) Type ID Match 1 Register */ #define REG_GMAC_TIDM2 (0x400500ACU) /**< \brief (GMAC) Type ID Match 2 Register */ #define REG_GMAC_TIDM3 (0x400500B0U) /**< \brief (GMAC) Type ID Match 3 Register */ #define REG_GMAC_TIDM4 (0x400500B4U) /**< \brief (GMAC) Type ID Match 4 Register */ #define REG_GMAC_WOL (0x400500B8U) /**< \brief (GMAC) Wake on LAN Register */ #define REG_GMAC_IPGS (0x400500BCU) /**< \brief (GMAC) IPG Stretch Register */ #define REG_GMAC_SVLAN (0x400500C0U) /**< \brief (GMAC) Stacked VLAN Register */ #define REG_GMAC_TPFCP (0x400500C4U) /**< \brief (GMAC) Transmit PFC Pause Register */ #define REG_GMAC_SAMB1 (0x400500C8U) /**< \brief (GMAC) Specific Address 1 Mask Bottom Register */ #define REG_GMAC_SAMT1 (0x400500CCU) /**< \brief (GMAC) Specific Address 1 Mask Top Register */ #define REG_GMAC_NSC (0x400500DCU) /**< \brief (GMAC) 1588 Timer Nanosecond Comparison Register */ #define REG_GMAC_SCL (0x400500E0U) /**< \brief (GMAC) 1588 Timer Second Comparison Low Register */ #define REG_GMAC_SCH (0x400500E4U) /**< \brief (GMAC) 1588 Timer Second Comparison High Register */ #define REG_GMAC_EFTSH 
(0x400500E8U) /**< \brief (GMAC) PTP Event Frame Transmitted Seconds High Register */ #define REG_GMAC_EFRSH (0x400500ECU) /**< \brief (GMAC) PTP Event Frame Received Seconds High Register */ #define REG_GMAC_PEFTSH (0x400500F0U) /**< \brief (GMAC) PTP Peer Event Frame Transmitted Seconds High Register */ #define REG_GMAC_PEFRSH (0x400500F4U) /**< \brief (GMAC) PTP Peer Event Frame Received Seconds High Register */ #define REG_GMAC_OTLO (0x40050100U) /**< \brief (GMAC) Octets Transmitted Low Register */ #define REG_GMAC_OTHI (0x40050104U) /**< \brief (GMAC) Octets Transmitted High Register */ #define REG_GMAC_FT (0x40050108U) /**< \brief (GMAC) Frames Transmitted Register */ #define REG_GMAC_BCFT (0x4005010CU) /**< \brief (GMAC) Broadcast Frames Transmitted Register */ #define REG_GMAC_MFT (0x40050110U) /**< \brief (GMAC) Multicast Frames Transmitted Register */ #define REG_GMAC_PFT (0x40050114U) /**< \brief (GMAC) Pause Frames Transmitted Register */ #define REG_GMAC_BFT64 (0x40050118U) /**< \brief (GMAC) 64 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT127 (0x4005011CU) /**< \brief (GMAC) 65 to 127 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT255 (0x40050120U) /**< \brief (GMAC) 128 to 255 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT511 (0x40050124U) /**< \brief (GMAC) 256 to 511 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT1023 (0x40050128U) /**< \brief (GMAC) 512 to 1023 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT1518 (0x4005012CU) /**< \brief (GMAC) 1024 to 1518 Byte Frames Transmitted Register */ #define REG_GMAC_GTBFT1518 (0x40050130U) /**< \brief (GMAC) Greater Than 1518 Byte Frames Transmitted Register */ #define REG_GMAC_TUR (0x40050134U) /**< \brief (GMAC) Transmit Underruns Register */ #define REG_GMAC_SCF (0x40050138U) /**< \brief (GMAC) Single Collision Frames Register */ #define REG_GMAC_MCF (0x4005013CU) /**< \brief (GMAC) Multiple Collision Frames Register */ #define REG_GMAC_EC 
(0x40050140U) /**< \brief (GMAC) Excessive Collisions Register */ #define REG_GMAC_LC (0x40050144U) /**< \brief (GMAC) Late Collisions Register */ #define REG_GMAC_DTF (0x40050148U) /**< \brief (GMAC) Deferred Transmission Frames Register */ #define REG_GMAC_CSE (0x4005014CU) /**< \brief (GMAC) Carrier Sense Errors Register Register */ #define REG_GMAC_ORLO (0x40050150U) /**< \brief (GMAC) Octets Received Low Received Register */ #define REG_GMAC_ORHI (0x40050154U) /**< \brief (GMAC) Octets Received High Received Register */ #define REG_GMAC_FR (0x40050158U) /**< \brief (GMAC) Frames Received Register */ #define REG_GMAC_BCFR (0x4005015CU) /**< \brief (GMAC) Broadcast Frames Received Register */ #define REG_GMAC_MFR (0x40050160U) /**< \brief (GMAC) Multicast Frames Received Register */ #define REG_GMAC_PFR (0x40050164U) /**< \brief (GMAC) Pause Frames Received Register */ #define REG_GMAC_BFR64 (0x40050168U) /**< \brief (GMAC) 64 Byte Frames Received Register */ #define REG_GMAC_TBFR127 (0x4005016CU) /**< \brief (GMAC) 65 to 127 Byte Frames Received Register */ #define REG_GMAC_TBFR255 (0x40050170U) /**< \brief (GMAC) 128 to 255 Byte Frames Received Register */ #define REG_GMAC_TBFR511 (0x40050174U) /**< \brief (GMAC) 256 to 511 Byte Frames Received Register */ #define REG_GMAC_TBFR1023 (0x40050178U) /**< \brief (GMAC) 512 to 1023 Byte Frames Received Register */ #define REG_GMAC_TBFR1518 (0x4005017CU) /**< \brief (GMAC) 1024 to 1518 Byte Frames Received Register */ #define REG_GMAC_TMXBFR (0x40050180U) /**< \brief (GMAC) 1519 to Maximum Byte Frames Received Register */ #define REG_GMAC_UFR (0x40050184U) /**< \brief (GMAC) Undersize Frames Received Register */ #define REG_GMAC_OFR (0x40050188U) /**< \brief (GMAC) Oversize Frames Received Register */ #define REG_GMAC_JR (0x4005018CU) /**< \brief (GMAC) Jabbers Received Register */ #define REG_GMAC_FCSE (0x40050190U) /**< \brief (GMAC) Frame Check Sequence Errors Register */ #define REG_GMAC_LFFE (0x40050194U) /**< 
\brief (GMAC) Length Field Frame Errors Register */ #define REG_GMAC_RSE (0x40050198U) /**< \brief (GMAC) Receive Symbol Errors Register */ #define REG_GMAC_AE (0x4005019CU) /**< \brief (GMAC) Alignment Errors Register */ #define REG_GMAC_RRE (0x400501A0U) /**< \brief (GMAC) Receive Resource Errors Register */ #define REG_GMAC_ROE (0x400501A4U) /**< \brief (GMAC) Receive Overrun Register */ #define REG_GMAC_IHCE (0x400501A8U) /**< \brief (GMAC) IP Header Checksum Errors Register */ #define REG_GMAC_TCE (0x400501ACU) /**< \brief (GMAC) TCP Checksum Errors Register */ #define REG_GMAC_UCE (0x400501B0U) /**< \brief (GMAC) UDP Checksum Errors Register */ #define REG_GMAC_TISUBN (0x400501BCU) /**< \brief (GMAC) 1588 Timer Increment Sub-nanoseconds Register */ #define REG_GMAC_TSH (0x400501C0U) /**< \brief (GMAC) 1588 Timer Seconds High Register */ #define REG_GMAC_TSL (0x400501D0U) /**< \brief (GMAC) 1588 Timer Seconds Low Register */ #define REG_GMAC_TN (0x400501D4U) /**< \brief (GMAC) 1588 Timer Nanoseconds Register */ #define REG_GMAC_TA (0x400501D8U) /**< \brief (GMAC) 1588 Timer Adjust Register */ #define REG_GMAC_TI (0x400501DCU) /**< \brief (GMAC) 1588 Timer Increment Register */ #define REG_GMAC_EFTSL (0x400501E0U) /**< \brief (GMAC) PTP Event Frame Transmitted Seconds Low Register */ #define REG_GMAC_EFTN (0x400501E4U) /**< \brief (GMAC) PTP Event Frame Transmitted Nanoseconds Register */ #define REG_GMAC_EFRSL (0x400501E8U) /**< \brief (GMAC) PTP Event Frame Received Seconds Low Register */ #define REG_GMAC_EFRN (0x400501ECU) /**< \brief (GMAC) PTP Event Frame Received Nanoseconds Register */ #define REG_GMAC_PEFTSL (0x400501F0U) /**< \brief (GMAC) PTP Peer Event Frame Transmitted Seconds Low Register */ #define REG_GMAC_PEFTN (0x400501F4U) /**< \brief (GMAC) PTP Peer Event Frame Transmitted Nanoseconds Register */ #define REG_GMAC_PEFRSL (0x400501F8U) /**< \brief (GMAC) PTP Peer Event Frame Received Seconds Low Register */ #define REG_GMAC_PEFRN (0x400501FCU) 
/**< \brief (GMAC) PTP Peer Event Frame Received Nanoseconds Register */ #define REG_GMAC_ISRPQ (0x40050400U) /**< \brief (GMAC) Interrupt Status Register Priority Queue (index = 1) */ #define REG_GMAC_TBQBAPQ (0x40050440U) /**< \brief (GMAC) Transmit Buffer Queue Base Address Register Priority Queue (index = 1) */ #define REG_GMAC_RBQBAPQ (0x40050480U) /**< \brief (GMAC) Receive Buffer Queue Base Address Register Priority Queue (index = 1) */ #define REG_GMAC_RBSRPQ (0x400504A0U) /**< \brief (GMAC) Receive Buffer Size Register Priority Queue (index = 1) */ #define REG_GMAC_CBSCR (0x400504BCU) /**< \brief (GMAC) Credit-Based Shaping Control Register */ #define REG_GMAC_CBSISQA (0x400504C0U) /**< \brief (GMAC) Credit-Based Shaping IdleSlope Register for Queue A */ #define REG_GMAC_CBSISQB (0x400504C4U) /**< \brief (GMAC) Credit-Based Shaping IdleSlope Register for Queue B */ #define REG_GMAC_ST1RPQ (0x40050500U) /**< \brief (GMAC) Screening Type 1 Register Priority Queue (index = 0) */ #define REG_GMAC_ST2RPQ (0x40050540U) /**< \brief (GMAC) Screening Type 2 Register Priority Queue (index = 0) */ #define REG_GMAC_IERPQ (0x40050600U) /**< \brief (GMAC) Interrupt Enable Register Priority Queue (index = 1) */ #define REG_GMAC_IDRPQ (0x40050620U) /**< \brief (GMAC) Interrupt Disable Register Priority Queue (index = 1) */ #define REG_GMAC_IMRPQ (0x40050640U) /**< \brief (GMAC) Interrupt Mask Register Priority Queue (index = 1) */ #define REG_GMAC_ST2ER (0x400506E0U) /**< \brief (GMAC) Screening Type 2 Ethertype Register (index = 0) */ #define REG_GMAC_ST2CW00 (0x40050700U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 0) */ #define REG_GMAC_ST2CW10 (0x40050704U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 0) */ #define REG_GMAC_ST2CW01 (0x40050708U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 1) */ #define REG_GMAC_ST2CW11 (0x4005070CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register 
(index = 1) */ #define REG_GMAC_ST2CW02 (0x40050710U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 2) */ #define REG_GMAC_ST2CW12 (0x40050714U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 2) */ #define REG_GMAC_ST2CW03 (0x40050718U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 3) */ #define REG_GMAC_ST2CW13 (0x4005071CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 3) */ #define REG_GMAC_ST2CW04 (0x40050720U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 4) */ #define REG_GMAC_ST2CW14 (0x40050724U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 4) */ #define REG_GMAC_ST2CW05 (0x40050728U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 5) */ #define REG_GMAC_ST2CW15 (0x4005072CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 5) */ #define REG_GMAC_ST2CW06 (0x40050730U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 6) */ #define REG_GMAC_ST2CW16 (0x40050734U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 6) */ #define REG_GMAC_ST2CW07 (0x40050738U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 7) */ #define REG_GMAC_ST2CW17 (0x4005073CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 7) */ #define REG_GMAC_ST2CW08 (0x40050740U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 8) */ #define REG_GMAC_ST2CW18 (0x40050744U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 8) */ #define REG_GMAC_ST2CW09 (0x40050748U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 9) */ #define REG_GMAC_ST2CW19 (0x4005074CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 9) */ #define REG_GMAC_ST2CW010 (0x40050750U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 10) */ #define REG_GMAC_ST2CW110 (0x40050754U) /**< 
\brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 10) */ #define REG_GMAC_ST2CW011 (0x40050758U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 11) */ #define REG_GMAC_ST2CW111 (0x4005075CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 11) */ #define REG_GMAC_ST2CW012 (0x40050760U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 12) */ #define REG_GMAC_ST2CW112 (0x40050764U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 12) */ #define REG_GMAC_ST2CW013 (0x40050768U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 13) */ #define REG_GMAC_ST2CW113 (0x4005076CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 13) */ #define REG_GMAC_ST2CW014 (0x40050770U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 14) */ #define REG_GMAC_ST2CW114 (0x40050774U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 14) */ #define REG_GMAC_ST2CW015 (0x40050778U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 15) */ #define REG_GMAC_ST2CW115 (0x4005077CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 15) */ #define REG_GMAC_ST2CW016 (0x40050780U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 16) */ #define REG_GMAC_ST2CW116 (0x40050784U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 16) */ #define REG_GMAC_ST2CW017 (0x40050788U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 17) */ #define REG_GMAC_ST2CW117 (0x4005078CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 17) */ #define REG_GMAC_ST2CW018 (0x40050790U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 18) */ #define REG_GMAC_ST2CW118 (0x40050794U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 18) */ #define REG_GMAC_ST2CW019 (0x40050798U) /**< \brief (GMAC) Screening 
Type 2 Compare Word 0 Register (index = 19) */ #define REG_GMAC_ST2CW119 (0x4005079CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 19) */ #define REG_GMAC_ST2CW020 (0x400507A0U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 20) */ #define REG_GMAC_ST2CW120 (0x400507A4U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 20) */ #define REG_GMAC_ST2CW021 (0x400507A8U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 21) */ #define REG_GMAC_ST2CW121 (0x400507ACU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 21) */ #define REG_GMAC_ST2CW022 (0x400507B0U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 22) */ #define REG_GMAC_ST2CW122 (0x400507B4U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 22) */ #define REG_GMAC_ST2CW023 (0x400507B8U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 23) */ #define REG_GMAC_ST2CW123 (0x400507BCU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 23) */ #else #define REG_GMAC_NCR (*(__IO uint32_t*)0x40050000U) /**< \brief (GMAC) Network Control Register */ #define REG_GMAC_NCFGR (*(__IO uint32_t*)0x40050004U) /**< \brief (GMAC) Network Configuration Register */ #define REG_GMAC_NSR (*(__I uint32_t*)0x40050008U) /**< \brief (GMAC) Network Status Register */ #define REG_GMAC_UR (*(__IO uint32_t*)0x4005000CU) /**< \brief (GMAC) User Register */ #define REG_GMAC_DCFGR (*(__IO uint32_t*)0x40050010U) /**< \brief (GMAC) DMA Configuration Register */ #define REG_GMAC_TSR (*(__IO uint32_t*)0x40050014U) /**< \brief (GMAC) Transmit Status Register */ #define REG_GMAC_RBQB (*(__IO uint32_t*)0x40050018U) /**< \brief (GMAC) Receive Buffer Queue Base Address Register */ #define REG_GMAC_TBQB (*(__IO uint32_t*)0x4005001CU) /**< \brief (GMAC) Transmit Buffer Queue Base Address Register */ #define REG_GMAC_RSR (*(__IO uint32_t*)0x40050020U) /**< \brief (GMAC) 
Receive Status Register */ #define REG_GMAC_ISR (*(__I uint32_t*)0x40050024U) /**< \brief (GMAC) Interrupt Status Register */ #define REG_GMAC_IER (*(__O uint32_t*)0x40050028U) /**< \brief (GMAC) Interrupt Enable Register */ #define REG_GMAC_IDR (*(__O uint32_t*)0x4005002CU) /**< \brief (GMAC) Interrupt Disable Register */ #define REG_GMAC_IMR (*(__IO uint32_t*)0x40050030U) /**< \brief (GMAC) Interrupt Mask Register */ #define REG_GMAC_MAN (*(__IO uint32_t*)0x40050034U) /**< \brief (GMAC) PHY Maintenance Register */ #define REG_GMAC_RPQ (*(__I uint32_t*)0x40050038U) /**< \brief (GMAC) Received Pause Quantum Register */ #define REG_GMAC_TPQ (*(__IO uint32_t*)0x4005003CU) /**< \brief (GMAC) Transmit Pause Quantum Register */ #define REG_GMAC_TPSF (*(__IO uint32_t*)0x40050040U) /**< \brief (GMAC) TX Partial Store and Forward Register */ #define REG_GMAC_RPSF (*(__IO uint32_t*)0x40050044U) /**< \brief (GMAC) RX Partial Store and Forward Register */ #define REG_GMAC_RJFML (*(__IO uint32_t*)0x40050048U) /**< \brief (GMAC) RX Jumbo Frame Max Length Register */ #define REG_GMAC_HRB (*(__IO uint32_t*)0x40050080U) /**< \brief (GMAC) Hash Register Bottom */ #define REG_GMAC_HRT (*(__IO uint32_t*)0x40050084U) /**< \brief (GMAC) Hash Register Top */ #define REG_GMAC_SAB1 (*(__IO uint32_t*)0x40050088U) /**< \brief (GMAC) Specific Address 1 Bottom Register */ #define REG_GMAC_SAT1 (*(__IO uint32_t*)0x4005008CU) /**< \brief (GMAC) Specific Address 1 Top Register */ #define REG_GMAC_SAB2 (*(__IO uint32_t*)0x40050090U) /**< \brief (GMAC) Specific Address 2 Bottom Register */ #define REG_GMAC_SAT2 (*(__IO uint32_t*)0x40050094U) /**< \brief (GMAC) Specific Address 2 Top Register */ #define REG_GMAC_SAB3 (*(__IO uint32_t*)0x40050098U) /**< \brief (GMAC) Specific Address 3 Bottom Register */ #define REG_GMAC_SAT3 (*(__IO uint32_t*)0x4005009CU) /**< \brief (GMAC) Specific Address 3 Top Register */ #define REG_GMAC_SAB4 (*(__IO uint32_t*)0x400500A0U) /**< \brief (GMAC) Specific Address 4 
Bottom Register */ #define REG_GMAC_SAT4 (*(__IO uint32_t*)0x400500A4U) /**< \brief (GMAC) Specific Address 4 Top Register */ #define REG_GMAC_TIDM1 (*(__IO uint32_t*)0x400500A8U) /**< \brief (GMAC) Type ID Match 1 Register */ #define REG_GMAC_TIDM2 (*(__IO uint32_t*)0x400500ACU) /**< \brief (GMAC) Type ID Match 2 Register */ #define REG_GMAC_TIDM3 (*(__IO uint32_t*)0x400500B0U) /**< \brief (GMAC) Type ID Match 3 Register */ #define REG_GMAC_TIDM4 (*(__IO uint32_t*)0x400500B4U) /**< \brief (GMAC) Type ID Match 4 Register */ #define REG_GMAC_WOL (*(__IO uint32_t*)0x400500B8U) /**< \brief (GMAC) Wake on LAN Register */ #define REG_GMAC_IPGS (*(__IO uint32_t*)0x400500BCU) /**< \brief (GMAC) IPG Stretch Register */ #define REG_GMAC_SVLAN (*(__IO uint32_t*)0x400500C0U) /**< \brief (GMAC) Stacked VLAN Register */ #define REG_GMAC_TPFCP (*(__IO uint32_t*)0x400500C4U) /**< \brief (GMAC) Transmit PFC Pause Register */ #define REG_GMAC_SAMB1 (*(__IO uint32_t*)0x400500C8U) /**< \brief (GMAC) Specific Address 1 Mask Bottom Register */ #define REG_GMAC_SAMT1 (*(__IO uint32_t*)0x400500CCU) /**< \brief (GMAC) Specific Address 1 Mask Top Register */ #define REG_GMAC_NSC (*(__IO uint32_t*)0x400500DCU) /**< \brief (GMAC) 1588 Timer Nanosecond Comparison Register */ #define REG_GMAC_SCL (*(__IO uint32_t*)0x400500E0U) /**< \brief (GMAC) 1588 Timer Second Comparison Low Register */ #define REG_GMAC_SCH (*(__IO uint32_t*)0x400500E4U) /**< \brief (GMAC) 1588 Timer Second Comparison High Register */ #define REG_GMAC_EFTSH (*(__I uint32_t*)0x400500E8U) /**< \brief (GMAC) PTP Event Frame Transmitted Seconds High Register */ #define REG_GMAC_EFRSH (*(__I uint32_t*)0x400500ECU) /**< \brief (GMAC) PTP Event Frame Received Seconds High Register */ #define REG_GMAC_PEFTSH (*(__I uint32_t*)0x400500F0U) /**< \brief (GMAC) PTP Peer Event Frame Transmitted Seconds High Register */ #define REG_GMAC_PEFRSH (*(__I uint32_t*)0x400500F4U) /**< \brief (GMAC) PTP Peer Event Frame Received Seconds High 
Register */ #define REG_GMAC_OTLO (*(__I uint32_t*)0x40050100U) /**< \brief (GMAC) Octets Transmitted Low Register */ #define REG_GMAC_OTHI (*(__I uint32_t*)0x40050104U) /**< \brief (GMAC) Octets Transmitted High Register */ #define REG_GMAC_FT (*(__I uint32_t*)0x40050108U) /**< \brief (GMAC) Frames Transmitted Register */ #define REG_GMAC_BCFT (*(__I uint32_t*)0x4005010CU) /**< \brief (GMAC) Broadcast Frames Transmitted Register */ #define REG_GMAC_MFT (*(__I uint32_t*)0x40050110U) /**< \brief (GMAC) Multicast Frames Transmitted Register */ #define REG_GMAC_PFT (*(__I uint32_t*)0x40050114U) /**< \brief (GMAC) Pause Frames Transmitted Register */ #define REG_GMAC_BFT64 (*(__I uint32_t*)0x40050118U) /**< \brief (GMAC) 64 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT127 (*(__I uint32_t*)0x4005011CU) /**< \brief (GMAC) 65 to 127 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT255 (*(__I uint32_t*)0x40050120U) /**< \brief (GMAC) 128 to 255 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT511 (*(__I uint32_t*)0x40050124U) /**< \brief (GMAC) 256 to 511 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT1023 (*(__I uint32_t*)0x40050128U) /**< \brief (GMAC) 512 to 1023 Byte Frames Transmitted Register */ #define REG_GMAC_TBFT1518 (*(__I uint32_t*)0x4005012CU) /**< \brief (GMAC) 1024 to 1518 Byte Frames Transmitted Register */ #define REG_GMAC_GTBFT1518 (*(__I uint32_t*)0x40050130U) /**< \brief (GMAC) Greater Than 1518 Byte Frames Transmitted Register */ #define REG_GMAC_TUR (*(__I uint32_t*)0x40050134U) /**< \brief (GMAC) Transmit Underruns Register */ #define REG_GMAC_SCF (*(__I uint32_t*)0x40050138U) /**< \brief (GMAC) Single Collision Frames Register */ #define REG_GMAC_MCF (*(__I uint32_t*)0x4005013CU) /**< \brief (GMAC) Multiple Collision Frames Register */ #define REG_GMAC_EC (*(__I uint32_t*)0x40050140U) /**< \brief (GMAC) Excessive Collisions Register */ #define REG_GMAC_LC (*(__I uint32_t*)0x40050144U) /**< \brief (GMAC) Late 
Collisions Register */ #define REG_GMAC_DTF (*(__I uint32_t*)0x40050148U) /**< \brief (GMAC) Deferred Transmission Frames Register */ #define REG_GMAC_CSE (*(__I uint32_t*)0x4005014CU) /**< \brief (GMAC) Carrier Sense Errors Register Register */ #define REG_GMAC_ORLO (*(__I uint32_t*)0x40050150U) /**< \brief (GMAC) Octets Received Low Received Register */ #define REG_GMAC_ORHI (*(__I uint32_t*)0x40050154U) /**< \brief (GMAC) Octets Received High Received Register */ #define REG_GMAC_FR (*(__I uint32_t*)0x40050158U) /**< \brief (GMAC) Frames Received Register */ #define REG_GMAC_BCFR (*(__I uint32_t*)0x4005015CU) /**< \brief (GMAC) Broadcast Frames Received Register */ #define REG_GMAC_MFR (*(__I uint32_t*)0x40050160U) /**< \brief (GMAC) Multicast Frames Received Register */ #define REG_GMAC_PFR (*(__I uint32_t*)0x40050164U) /**< \brief (GMAC) Pause Frames Received Register */ #define REG_GMAC_BFR64 (*(__I uint32_t*)0x40050168U) /**< \brief (GMAC) 64 Byte Frames Received Register */ #define REG_GMAC_TBFR127 (*(__I uint32_t*)0x4005016CU) /**< \brief (GMAC) 65 to 127 Byte Frames Received Register */ #define REG_GMAC_TBFR255 (*(__I uint32_t*)0x40050170U) /**< \brief (GMAC) 128 to 255 Byte Frames Received Register */ #define REG_GMAC_TBFR511 (*(__I uint32_t*)0x40050174U) /**< \brief (GMAC) 256 to 511 Byte Frames Received Register */ #define REG_GMAC_TBFR1023 (*(__I uint32_t*)0x40050178U) /**< \brief (GMAC) 512 to 1023 Byte Frames Received Register */ #define REG_GMAC_TBFR1518 (*(__I uint32_t*)0x4005017CU) /**< \brief (GMAC) 1024 to 1518 Byte Frames Received Register */ #define REG_GMAC_TMXBFR (*(__I uint32_t*)0x40050180U) /**< \brief (GMAC) 1519 to Maximum Byte Frames Received Register */ #define REG_GMAC_UFR (*(__I uint32_t*)0x40050184U) /**< \brief (GMAC) Undersize Frames Received Register */ #define REG_GMAC_OFR (*(__I uint32_t*)0x40050188U) /**< \brief (GMAC) Oversize Frames Received Register */ #define REG_GMAC_JR (*(__I uint32_t*)0x4005018CU) /**< \brief (GMAC) 
Jabbers Received Register */ #define REG_GMAC_FCSE (*(__I uint32_t*)0x40050190U) /**< \brief (GMAC) Frame Check Sequence Errors Register */ #define REG_GMAC_LFFE (*(__I uint32_t*)0x40050194U) /**< \brief (GMAC) Length Field Frame Errors Register */ #define REG_GMAC_RSE (*(__I uint32_t*)0x40050198U) /**< \brief (GMAC) Receive Symbol Errors Register */ #define REG_GMAC_AE (*(__I uint32_t*)0x4005019CU) /**< \brief (GMAC) Alignment Errors Register */ #define REG_GMAC_RRE (*(__I uint32_t*)0x400501A0U) /**< \brief (GMAC) Receive Resource Errors Register */ #define REG_GMAC_ROE (*(__I uint32_t*)0x400501A4U) /**< \brief (GMAC) Receive Overrun Register */ #define REG_GMAC_IHCE (*(__I uint32_t*)0x400501A8U) /**< \brief (GMAC) IP Header Checksum Errors Register */ #define REG_GMAC_TCE (*(__I uint32_t*)0x400501ACU) /**< \brief (GMAC) TCP Checksum Errors Register */ #define REG_GMAC_UCE (*(__I uint32_t*)0x400501B0U) /**< \brief (GMAC) UDP Checksum Errors Register */ #define REG_GMAC_TISUBN (*(__IO uint32_t*)0x400501BCU) /**< \brief (GMAC) 1588 Timer Increment Sub-nanoseconds Register */ #define REG_GMAC_TSH (*(__IO uint32_t*)0x400501C0U) /**< \brief (GMAC) 1588 Timer Seconds High Register */ #define REG_GMAC_TSL (*(__IO uint32_t*)0x400501D0U) /**< \brief (GMAC) 1588 Timer Seconds Low Register */ #define REG_GMAC_TN (*(__IO uint32_t*)0x400501D4U) /**< \brief (GMAC) 1588 Timer Nanoseconds Register */ #define REG_GMAC_TA (*(__O uint32_t*)0x400501D8U) /**< \brief (GMAC) 1588 Timer Adjust Register */ #define REG_GMAC_TI (*(__IO uint32_t*)0x400501DCU) /**< \brief (GMAC) 1588 Timer Increment Register */ #define REG_GMAC_EFTSL (*(__I uint32_t*)0x400501E0U) /**< \brief (GMAC) PTP Event Frame Transmitted Seconds Low Register */ #define REG_GMAC_EFTN (*(__I uint32_t*)0x400501E4U) /**< \brief (GMAC) PTP Event Frame Transmitted Nanoseconds Register */ #define REG_GMAC_EFRSL (*(__I uint32_t*)0x400501E8U) /**< \brief (GMAC) PTP Event Frame Received Seconds Low Register */ #define 
REG_GMAC_EFRN (*(__I uint32_t*)0x400501ECU) /**< \brief (GMAC) PTP Event Frame Received Nanoseconds Register */ #define REG_GMAC_PEFTSL (*(__I uint32_t*)0x400501F0U) /**< \brief (GMAC) PTP Peer Event Frame Transmitted Seconds Low Register */ #define REG_GMAC_PEFTN (*(__I uint32_t*)0x400501F4U) /**< \brief (GMAC) PTP Peer Event Frame Transmitted Nanoseconds Register */ #define REG_GMAC_PEFRSL (*(__I uint32_t*)0x400501F8U) /**< \brief (GMAC) PTP Peer Event Frame Received Seconds Low Register */ #define REG_GMAC_PEFRN (*(__I uint32_t*)0x400501FCU) /**< \brief (GMAC) PTP Peer Event Frame Received Nanoseconds Register */ #define REG_GMAC_ISRPQ (*(__I uint32_t*)0x40050400U) /**< \brief (GMAC) Interrupt Status Register Priority Queue (index = 1) */ #define REG_GMAC_TBQBAPQ (*(__IO uint32_t*)0x40050440U) /**< \brief (GMAC) Transmit Buffer Queue Base Address Register Priority Queue (index = 1) */ #define REG_GMAC_RBQBAPQ (*(__IO uint32_t*)0x40050480U) /**< \brief (GMAC) Receive Buffer Queue Base Address Register Priority Queue (index = 1) */ #define REG_GMAC_RBSRPQ (*(__IO uint32_t*)0x400504A0U) /**< \brief (GMAC) Receive Buffer Size Register Priority Queue (index = 1) */ #define REG_GMAC_CBSCR (*(__IO uint32_t*)0x400504BCU) /**< \brief (GMAC) Credit-Based Shaping Control Register */ #define REG_GMAC_CBSISQA (*(__IO uint32_t*)0x400504C0U) /**< \brief (GMAC) Credit-Based Shaping IdleSlope Register for Queue A */ #define REG_GMAC_CBSISQB (*(__IO uint32_t*)0x400504C4U) /**< \brief (GMAC) Credit-Based Shaping IdleSlope Register for Queue B */ #define REG_GMAC_ST1RPQ (*(__IO uint32_t*)0x40050500U) /**< \brief (GMAC) Screening Type 1 Register Priority Queue (index = 0) */ #define REG_GMAC_ST2RPQ (*(__IO uint32_t*)0x40050540U) /**< \brief (GMAC) Screening Type 2 Register Priority Queue (index = 0) */ #define REG_GMAC_IERPQ (*(__O uint32_t*)0x40050600U) /**< \brief (GMAC) Interrupt Enable Register Priority Queue (index = 1) */ #define REG_GMAC_IDRPQ (*(__O uint32_t*)0x40050620U) 
/**< \brief (GMAC) Interrupt Disable Register Priority Queue (index = 1) */ #define REG_GMAC_IMRPQ (*(__IO uint32_t*)0x40050640U) /**< \brief (GMAC) Interrupt Mask Register Priority Queue (index = 1) */ #define REG_GMAC_ST2ER (*(__IO uint32_t*)0x400506E0U) /**< \brief (GMAC) Screening Type 2 Ethertype Register (index = 0) */ #define REG_GMAC_ST2CW00 (*(__IO uint32_t*)0x40050700U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 0) */ #define REG_GMAC_ST2CW10 (*(__IO uint32_t*)0x40050704U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 0) */ #define REG_GMAC_ST2CW01 (*(__IO uint32_t*)0x40050708U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 1) */ #define REG_GMAC_ST2CW11 (*(__IO uint32_t*)0x4005070CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 1) */ #define REG_GMAC_ST2CW02 (*(__IO uint32_t*)0x40050710U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 2) */ #define REG_GMAC_ST2CW12 (*(__IO uint32_t*)0x40050714U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 2) */ #define REG_GMAC_ST2CW03 (*(__IO uint32_t*)0x40050718U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 3) */ #define REG_GMAC_ST2CW13 (*(__IO uint32_t*)0x4005071CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 3) */ #define REG_GMAC_ST2CW04 (*(__IO uint32_t*)0x40050720U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 4) */ #define REG_GMAC_ST2CW14 (*(__IO uint32_t*)0x40050724U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 4) */ #define REG_GMAC_ST2CW05 (*(__IO uint32_t*)0x40050728U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 5) */ #define REG_GMAC_ST2CW15 (*(__IO uint32_t*)0x4005072CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 5) */ #define REG_GMAC_ST2CW06 (*(__IO uint32_t*)0x40050730U) /**< \brief (GMAC) Screening Type 2 Compare 
Word 0 Register (index = 6) */ #define REG_GMAC_ST2CW16 (*(__IO uint32_t*)0x40050734U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 6) */ #define REG_GMAC_ST2CW07 (*(__IO uint32_t*)0x40050738U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 7) */ #define REG_GMAC_ST2CW17 (*(__IO uint32_t*)0x4005073CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 7) */ #define REG_GMAC_ST2CW08 (*(__IO uint32_t*)0x40050740U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 8) */ #define REG_GMAC_ST2CW18 (*(__IO uint32_t*)0x40050744U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 8) */ #define REG_GMAC_ST2CW09 (*(__IO uint32_t*)0x40050748U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 9) */ #define REG_GMAC_ST2CW19 (*(__IO uint32_t*)0x4005074CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 9) */ #define REG_GMAC_ST2CW010 (*(__IO uint32_t*)0x40050750U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 10) */ #define REG_GMAC_ST2CW110 (*(__IO uint32_t*)0x40050754U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 10) */ #define REG_GMAC_ST2CW011 (*(__IO uint32_t*)0x40050758U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 11) */ #define REG_GMAC_ST2CW111 (*(__IO uint32_t*)0x4005075CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 11) */ #define REG_GMAC_ST2CW012 (*(__IO uint32_t*)0x40050760U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 12) */ #define REG_GMAC_ST2CW112 (*(__IO uint32_t*)0x40050764U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 12) */ #define REG_GMAC_ST2CW013 (*(__IO uint32_t*)0x40050768U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 13) */ #define REG_GMAC_ST2CW113 (*(__IO uint32_t*)0x4005076CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register 
(index = 13) */ #define REG_GMAC_ST2CW014 (*(__IO uint32_t*)0x40050770U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 14) */ #define REG_GMAC_ST2CW114 (*(__IO uint32_t*)0x40050774U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 14) */ #define REG_GMAC_ST2CW015 (*(__IO uint32_t*)0x40050778U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 15) */ #define REG_GMAC_ST2CW115 (*(__IO uint32_t*)0x4005077CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 15) */ #define REG_GMAC_ST2CW016 (*(__IO uint32_t*)0x40050780U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 16) */ #define REG_GMAC_ST2CW116 (*(__IO uint32_t*)0x40050784U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 16) */ #define REG_GMAC_ST2CW017 (*(__IO uint32_t*)0x40050788U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 17) */ #define REG_GMAC_ST2CW117 (*(__IO uint32_t*)0x4005078CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 17) */ #define REG_GMAC_ST2CW018 (*(__IO uint32_t*)0x40050790U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 18) */ #define REG_GMAC_ST2CW118 (*(__IO uint32_t*)0x40050794U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 18) */ #define REG_GMAC_ST2CW019 (*(__IO uint32_t*)0x40050798U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 19) */ #define REG_GMAC_ST2CW119 (*(__IO uint32_t*)0x4005079CU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 19) */ #define REG_GMAC_ST2CW020 (*(__IO uint32_t*)0x400507A0U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 20) */ #define REG_GMAC_ST2CW120 (*(__IO uint32_t*)0x400507A4U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 20) */ #define REG_GMAC_ST2CW021 (*(__IO uint32_t*)0x400507A8U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register 
(index = 21) */ #define REG_GMAC_ST2CW121 (*(__IO uint32_t*)0x400507ACU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 21) */ #define REG_GMAC_ST2CW022 (*(__IO uint32_t*)0x400507B0U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 22) */ #define REG_GMAC_ST2CW122 (*(__IO uint32_t*)0x400507B4U) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 22) */ #define REG_GMAC_ST2CW023 (*(__IO uint32_t*)0x400507B8U) /**< \brief (GMAC) Screening Type 2 Compare Word 0 Register (index = 23) */ #define REG_GMAC_ST2CW123 (*(__IO uint32_t*)0x400507BCU) /**< \brief (GMAC) Screening Type 2 Compare Word 1 Register (index = 23) */ #endif /* (defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */ #endif /* _SAMV71_GMAC_INSTANCE_ */
apache-2.0
paulnovo/ITK
Modules/Video/Core/include/itkRingBuffer.h
3927
/*========================================================================= * * Copyright Insight Software Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *=========================================================================*/ #ifndef __itkRingBuffer_h #define __itkRingBuffer_h #include "itkObject.h" #include "itkObjectFactory.h" #include "itkIntTypes.h" namespace itk { /** \class RingBuffer * \brief Templated ring buffer for holding anything * * This ring buffer can hold any type of itk class that supports smart * pointers. A HEAD pointer is maintained and the buffer pointers can be * accessed in order using either positive or negative offsets. The HEAD * pointer can also be moved forward or backward in the ring. * * \ingroup ITKVideoCore */ template< class TElement > class ITK_EXPORT RingBuffer : public Object { public: /**-TYPEDEFS---------------------------------------------------------------*/ /** Standard class typedefs */ typedef RingBuffer Self; typedef Object Superclass; typedef SmartPointer< Self > Pointer; typedef SmartPointer< const Self > ConstPointer; /** Contained type */ typedef TElement ElementType; typedef typename ElementType::Pointer ElementPointer; typedef ::itk::SizeValueType SizeValueType; typedef ::itk::OffsetValueType OffsetValueType; /** Method for creation through the object factory. */ itkNewMacro(Self); /** Run-time type information (and related methods). 
*/ itkTypeMacro(RingBuffer, Object); /** Set the number of buffers * WARNING: If the size is set smaller than the current buffer size, the tail * of the buffer will be chopped off */ void SetNumberOfBuffers(SizeValueType sz); /** Get the buffer size */ SizeValueType GetNumberOfBuffers(); /** Move the Head pointer along the ring using the given offset */ void MoveHead(OffsetValueType offset); /** Convenience methods for moving Head +/- 1 */ void MoveHeadForward(); void MoveHeadBackward(); /** Report whether or not the indicated buffer is full */ bool BufferIsFull(OffsetValueType offset); /** Report the current position of Head (mostly used for testing) */ SizeValueType GetHeadIndex() { return this->m_HeadIndex; } /** Access the data from the indicated buffer */ typename ElementType::Pointer GetBufferContents(OffsetValueType offset); /** Set the buffer contents of a buffer */ void SetBufferContents(OffsetValueType offset, ElementPointer element); protected: /**-PROTECTED METHODS------------------------------------------------------*/ RingBuffer(); virtual ~RingBuffer(); void PrintSelf(std::ostream &os, Indent indent) const; /** Get the proper buffer index from an offset */ OffsetValueType GetOffsetBufferIndex(OffsetValueType offset); /**-PROTECTED MEMBERS------------------------------------------------------*/ /** Pointer to the current active buffer */ SizeValueType m_HeadIndex; /** Vector of pointers to elements */ std::vector<ElementPointer> m_PointerVector; private: RingBuffer(const Self &); // purposely not implemented void operator=(const Self &); // purposely not implemented }; // end RingBuffer class } // end namespace itk #if ITK_TEMPLATE_TXX #include "itkRingBuffer.hxx" #endif #endif
apache-2.0
jamesnetherton/wildfly-camel
itests/common/src/main/java/org/wildfly/camel/test/common/aws/SQSUtils.java
1275
/* * #%L * Wildfly Camel :: Testsuite * %% * Copyright (C) 2013 - 2017 RedHat * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.wildfly.camel.test.common.aws; import com.amazonaws.services.sqs.AmazonSQSClient; import com.amazonaws.services.sqs.AmazonSQSClientBuilder; public class SQSUtils { // Attach Policy: AmazonSQSFullAccess public static AmazonSQSClient createSQSClient() { BasicCredentialsProvider credentials = BasicCredentialsProvider.standard(); AmazonSQSClient client = !credentials.isValid() ? null : (AmazonSQSClient) AmazonSQSClientBuilder.standard() .withCredentials(credentials) .withRegion("eu-west-1").build(); return client; } }
apache-2.0
maas-ufcg/manageiq
app/controllers/miq_policy_controller/policy_profiles.rb
7000
module MiqPolicyController::PolicyProfiles extend ActiveSupport::Concern def profile_edit case params[:button] when "cancel" @edit = nil @profile = MiqPolicySet.find_by_id(session[:edit][:profile_id]) if session[:edit] && session[:edit][:profile_id] if !@profile || (@profile && @profile.id.blank?) add_flash(_("Add of new %{models} was cancelled by the user") % {:models => ui_lookup(:model => "MiqPolicySet")}) else add_flash(_("Edit of %{model} \"%{name}\" was cancelled by the user") % {:model => ui_lookup(:model => "MiqPolicySet"), :name => @profile.description}) end get_node_info(x_node) replace_right_cell(@nodetype) return when "reset", nil # Reset or first time in profile_build_edit_screen @sb[:action] = "profile_edit" if params[:button] == "reset" add_flash(_("All changes have been reset"), :warning) end replace_right_cell("pp") return end # Load @edit/vars for other buttons id = params[:id] ? params[:id] : "new" return unless load_edit("profile_edit__#{id}", "replace_cell__explorer") @profile = @edit[:profile_id] ? MiqPolicySet.find_by_id(@edit[:profile_id]) : MiqPolicySet.new case params[:button] when "save", "add" assert_privileges("profile_#{@profile.id ? "edit" : "new"}") add_flash(_("%{model} must contain at least one %{field}") % {:model => ui_lookup(:model => "MiqPolicySet"), :field => ui_lookup(:model => "MiqPolicy")}, :error) if @edit[:new][:policies].length == 0 # At least one member is required profile = @profile.id.blank? ? MiqPolicySet.new : MiqPolicySet.find(@profile.id) # Get new or existing record profile.description = @edit[:new][:description] profile.notes = @edit[:new][:notes] if profile.valid? 
&& !@flash_array && profile.save policies = profile.members # Get the sets members current = [] policies.each { |p| current.push(p.id) } # Build an array of the current policy ids mems = @edit[:new][:policies].invert # Get the ids from the member list box begin policies.each { |c| profile.remove_member(MiqPolicy.find(c)) unless mems.include?(c.id) } # Remove any policies no longer in the members list box mems.each_key { |m| profile.add_member(MiqPolicy.find(m)) unless current.include?(m) } # Add any policies not in the set rescue StandardError => bang add_flash(_("Error during 'Policy Profile %{params}': %{messages}") % {:params => params[:button], :messages => bang.message}, :error) end AuditEvent.success(build_saved_audit(profile, params[:button] == "add")) flash_key = params[:button] == "save" ? _("%{model} \"%{name}\" was saved") : _("%{model} \"%{name}\" was added") add_flash(flash_key % {:model => ui_lookup(:model => "MiqPolicySet"), :name => @edit[:new][:description]}) profile_get_info(MiqPolicySet.find(profile.id)) @edit = nil @nodetype = "pp" @new_profile_node = "pp-#{to_cid(profile.id)}" replace_right_cell("pp", [:policy_profile]) else profile.errors.each do |field, msg| add_flash("#{field.to_s.capitalize} #{msg}", :error) end replace_right_cell("pp") end when "move_right", "move_left", "move_allleft" handle_selection_buttons(:policies) session[:changed] = (@edit[:new] != @edit[:current]) replace_right_cell("pp") end end def profile_delete assert_privileges("profile_delete") profiles = [] # showing 1 policy set, delete it if params[:id].nil? || MiqPolicySet.find_by_id(params[:id]).nil? add_flash(_("%{models} no longer exists") % {:models => ui_lookup(:model => "MiqPolicySet")}, :error) else profiles.push(params[:id]) end process_profiles(profiles, "destroy") unless profiles.empty? add_flash(_("The selected %{models} was deleted") % {:models => ui_lookup(:models => "MiqPolicySet")}) if @flash_array.nil? 
self.x_node = @new_profile_node = 'root' get_node_info('root') replace_right_cell('root', [:policy_profile]) end def profile_field_changed return unless load_edit("profile_edit__#{params[:id]}", "replace_cell__explorer") @profile = @edit[:profile_id] ? MiqPolicySet.find_by_id(@edit[:profile_id]) : MiqPolicySet.new @edit[:new][:description] = params[:description].blank? ? nil : params[:description] if params[:description] @edit[:new][:notes] = params[:notes].blank? ? nil : params[:notes] if params[:notes] send_button_changes end private def process_profiles(profiles, task) process_elements(profiles, MiqPolicySet, task) end def profile_build_edit_screen @edit = {} @edit[:new] = {} @edit[:current] = {} @profile = params[:id] ? MiqPolicySet.find(params[:id]) : MiqPolicySet.new # Get existing or new record @edit[:key] = "profile_edit__#{@profile.id || "new"}" @edit[:rec_id] = @profile.id || nil @edit[:profile_id] = @profile.id @edit[:new][:description] = @profile.description @edit[:new][:notes] = @profile.notes @edit[:new][:policies] = {} policies = @profile.members # Get the member sets policies.each { |p| @edit[:new][:policies][ui_lookup(:model => p.towhat) + " #{p.mode.capitalize}: " + p.description] = p.id } # Build a hash for the members list box @edit[:choices] = {} MiqPolicy.all.each do |p| @edit[:choices][ui_lookup(:model => p.towhat) + " #{p.mode.capitalize}: " + p.description] = p.id # Build a hash for the policies to choose from end @edit[:new][:policies].each_key do |key| @edit[:choices].delete(key) # Remove any policies that are in the members list box end @edit[:current] = copy_hash(@edit[:new]) @embedded = true @in_a_form = true @edit[:current][:add] = true if @edit[:profile_id].blank? 
# Force changed to be true if adding a record session[:changed] = (@edit[:new] != @edit[:current]) end def profile_get_all @profiles = MiqPolicySet.all.sort_by { |ps| ps.description.downcase } set_search_text @profiles = apply_search_filter(@search_text, @profiles) unless @search_text.blank? @right_cell_text = _("All %{models}") % {:models => ui_lookup(:models => "MiqPolicySet")} @right_cell_div = "profile_list" end # Get information for a profile def profile_get_info(profile) @record = @profile = profile @profile_policies = @profile.miq_policies.sort_by { |p| [p.towhat, p.mode, p.description.downcase] } @right_cell_text = _("%{model} \"%{name}\"") % {:model => ui_lookup(:model => "MiqPolicySet"), :name => @profile.description} @right_cell_div = "profile_details" end end
apache-2.0
apache/jmeter
src/protocol/http/src/main/java/org/apache/jmeter/protocol/http/visualizers/RequestViewHTTP.java
18933
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jmeter.protocol.http.visualizers; import java.awt.BorderLayout; import java.awt.Component; import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.swing.JPanel; import javax.swing.JSplitPane; import javax.swing.JTable; import javax.swing.table.TableCellRenderer; import javax.swing.table.TableColumn; import org.apache.commons.lang3.StringUtils; import org.apache.jmeter.config.Argument; import org.apache.jmeter.gui.util.HeaderAsPropertyRenderer; import org.apache.jmeter.gui.util.TextBoxDialoger.TextBoxDoubleClick; import org.apache.jmeter.protocol.http.config.MultipartUrlConfig; import org.apache.jmeter.protocol.http.sampler.HTTPSampleResult; import org.apache.jmeter.protocol.http.util.HTTPConstants; import org.apache.jmeter.testelement.property.JMeterProperty; import org.apache.jmeter.util.JMeterUtils; import org.apache.jmeter.visualizers.RequestView; import org.apache.jmeter.visualizers.SamplerResultTab.RowResult; import 
org.apache.jmeter.visualizers.SearchTextExtension; import org.apache.jmeter.visualizers.SearchTextExtension.ISearchTextExtensionProvider; import org.apache.jorphan.gui.GuiUtils; import org.apache.jorphan.gui.ObjectTableModel; import org.apache.jorphan.gui.RendererUtils; import org.apache.jorphan.reflect.Functor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Specializer panel to view a HTTP request parsed */ public class RequestViewHTTP implements RequestView { private static final Logger log = LoggerFactory.getLogger(RequestViewHTTP.class); private static final String KEY_LABEL = "view_results_table_request_tab_http"; //$NON-NLS-1$ private static final String CHARSET_DECODE = StandardCharsets.ISO_8859_1.name(); private static final String PARAM_CONCATENATE = "&"; //$NON-NLS-1$ private JPanel paneParsed; private ObjectTableModel requestModel = null; private ObjectTableModel paramsModel = null; private ObjectTableModel headersModel = null; private static final String[] COLUMNS_REQUEST = new String[] { " ", // one space for blank header // $NON-NLS-1$ " " }; // one space for blank header // $NON-NLS-1$ private static final String[] COLUMNS_PARAMS = new String[] { "view_results_table_request_params_key", // $NON-NLS-1$ "view_results_table_request_params_value" }; // $NON-NLS-1$ private static final String[] COLUMNS_HEADERS = new String[] { "view_results_table_request_headers_key", // $NON-NLS-1$ "view_results_table_request_headers_value" }; // $NON-NLS-1$ private JTable tableRequest = null; private JTable tableParams = null; private JTable tableHeaders = null; // Request headers column renderers private static final TableCellRenderer[] RENDERERS_REQUEST = new TableCellRenderer[] { null, // Key null, // Value }; // Request headers column renderers private static final TableCellRenderer[] RENDERERS_PARAMS = new TableCellRenderer[] { null, // Key null, // Value }; // Request headers column renderers private static final TableCellRenderer[] 
RENDERERS_HEADERS = new TableCellRenderer[] { null, // Key null, // Value }; private SearchTextExtension searchTextExtension; /** * Pane to view HTTP request sample in view results tree */ public RequestViewHTTP() { requestModel = new ObjectTableModel(COLUMNS_REQUEST, RowResult.class, // The object used for each row new Functor[] { new Functor("getKey"), // $NON-NLS-1$ new Functor("getValue") }, // $NON-NLS-1$ new Functor[] { null, null }, new Class[] { String.class, String.class }, false); paramsModel = new ObjectTableModel(COLUMNS_PARAMS, RowResult.class, // The object used for each row new Functor[] { new Functor("getKey"), // $NON-NLS-1$ new Functor("getValue") }, // $NON-NLS-1$ new Functor[] { null, null }, new Class[] { String.class, String.class }, false); headersModel = new ObjectTableModel(COLUMNS_HEADERS, RowResult.class, // The object used for each row new Functor[] { new Functor("getKey"), // $NON-NLS-1$ new Functor("getValue") }, // $NON-NLS-1$ new Functor[] { null, null }, new Class[] { String.class, String.class }, false); } /* (non-Javadoc) * @see org.apache.jmeter.visualizers.request.RequestView#init() */ @Override public void init() { paneParsed = new JPanel(new BorderLayout(0, 5)); paneParsed.add(createRequestPane(), BorderLayout.CENTER); this.searchTextExtension = new SearchTextExtension(new RequestViewHttpSearchProvider()); paneParsed.add(searchTextExtension.getSearchToolBar(), BorderLayout.NORTH); } /* (non-Javadoc) * @see org.apache.jmeter.visualizers.request.RequestView#clearData() */ @Override public void clearData() { requestModel.clearData(); paramsModel.clearData(); headersModel.clearData(); // clear results table before filling } /* (non-Javadoc) * @see org.apache.jmeter.visualizers.request.RequestView#setSamplerResult(java.lang.Object) */ @Override public void setSamplerResult(Object objectResult) { this.searchTextExtension.resetTextToFind(); if (objectResult instanceof HTTPSampleResult) { HTTPSampleResult sampleResult = 
(HTTPSampleResult) objectResult; // Display with same order HTTP protocol requestModel.addRow(new RowResult( JMeterUtils.getResString("view_results_table_request_http_method"), //$NON-NLS-1$ sampleResult.getHTTPMethod())); // Parsed request headers LinkedHashMap<String, String> lhm = JMeterUtils.parseHeaders(sampleResult.getRequestHeaders()); for (Map.Entry<String, String> entry : lhm.entrySet()) { headersModel.addRow(new RowResult(entry.getKey(), entry.getValue())); } URL hUrl = sampleResult.getURL(); if (hUrl != null){ // can be null - e.g. if URL was invalid requestModel.addRow(new RowResult(JMeterUtils .getResString("view_results_table_request_http_protocol"), //$NON-NLS-1$ hUrl.getProtocol())); requestModel.addRow(new RowResult( JMeterUtils.getResString("view_results_table_request_http_host"), //$NON-NLS-1$ hUrl.getHost())); int port = hUrl.getPort() == -1 ? hUrl.getDefaultPort() : hUrl.getPort(); requestModel.addRow(new RowResult( JMeterUtils.getResString("view_results_table_request_http_port"), //$NON-NLS-1$ port)); requestModel.addRow(new RowResult( JMeterUtils.getResString("view_results_table_request_http_path"), //$NON-NLS-1$ hUrl.getPath())); String queryGet = hUrl.getQuery() == null ? 
"" : hUrl.getQuery(); //$NON-NLS-1$ boolean isMultipart = isMultipart(lhm); // Concatenate query post if exists String queryPost = sampleResult.getQueryString(); if (!isMultipart && StringUtils.isNotBlank(queryPost)) { if (queryGet.length() > 0) { queryGet += PARAM_CONCATENATE; } queryGet += queryPost; } if (StringUtils.isNotBlank(queryGet)) { Set<Map.Entry<String, String[]>> keys = RequestViewHTTP.getQueryMap(queryGet).entrySet(); for (Map.Entry<String, String[]> entry : keys) { for (String value : entry.getValue()) { paramsModel.addRow(new RowResult(entry.getKey(), value)); } } } if(isMultipart && StringUtils.isNotBlank(queryPost)) { String contentType = lhm.get(HTTPConstants.HEADER_CONTENT_TYPE); String boundaryString = extractBoundary(contentType); MultipartUrlConfig urlconfig = new MultipartUrlConfig(boundaryString); urlconfig.parseArguments(queryPost); for(JMeterProperty prop : urlconfig.getArguments()) { Argument arg = (Argument) prop.getObjectValue(); paramsModel.addRow(new RowResult(arg.getName(), arg.getValue())); } } } // Display cookie in headers table (same location on http protocol) String cookie = sampleResult.getCookies(); if (cookie != null && cookie.length() > 0) { headersModel.addRow(new RowResult( JMeterUtils.getParsedLabel("view_results_table_request_http_cookie"), //$NON-NLS-1$ sampleResult.getCookies())); } } else { // add a message when no http sample requestModel.addRow(new RowResult("", //$NON-NLS-1$ JMeterUtils.getResString("view_results_table_request_http_nohttp"))); //$NON-NLS-1$ } } /** * Extract the multipart boundary * @param contentType the content type header * @return the boundary string */ private String extractBoundary(String contentType) { // Get the boundary string for the multiparts from the content type String boundaryString = contentType.substring(contentType.toLowerCase(java.util.Locale.ENGLISH).indexOf("boundary=") + "boundary=".length()); //TODO check in the RFC if other char can be used as separator String[] split = 
boundaryString.split(";"); if(split.length > 1) { boundaryString = split[0]; } return boundaryString; } /** * check if the request is multipart * @param headers the http request headers * @return true if the request is multipart */ private boolean isMultipart(LinkedHashMap<String, String> headers) { String contentType = headers.get(HTTPConstants.HEADER_CONTENT_TYPE); return contentType != null && contentType.startsWith(HTTPConstants.MULTIPART_FORM_DATA); } /** * @param query query to parse for param and value pairs * @return Map params and values */ //TODO: move to utils class (JMeterUtils?) public static Map<String, String[]> getQueryMap(String query) { Map<String, String[]> map = new HashMap<>(); String[] params = query.split(PARAM_CONCATENATE); for (String param : params) { String[] paramSplit = param.split("="); String name = decodeQuery(paramSplit[0]); // hack for SOAP request (generally) if (name.trim().startsWith("<?")) { // $NON-NLS-1$ map.put(" ", new String[] {query}); //blank name // $NON-NLS-1$ return map; } // the post payload is not key=value if((param.startsWith("=") && paramSplit.length == 1) || paramSplit.length > 2) { map.put(" ", new String[] {query}); //blank name // $NON-NLS-1$ return map; } String value = ""; if(paramSplit.length>1) { value = decodeQuery(paramSplit[1]); } String[] known = map.get(name); if(known == null) { known = new String[] {value}; } else { String[] tmp = new String[known.length+1]; tmp[tmp.length-1] = value; System.arraycopy(known, 0, tmp, 0, known.length); known = tmp; } map.put(name, known); } return map; } /** * Decode a query string * * @param query * to decode * @return the decoded query string, if it can be url-decoded. Otherwise the original * query will be returned. 
*/ public static String decodeQuery(String query) { if (query != null && query.length() > 0) { try { return URLDecoder.decode(query, CHARSET_DECODE); // better ISO-8859-1 than UTF-8 } catch (IllegalArgumentException | UnsupportedEncodingException e) { log.warn( "Error decoding query, maybe your request parameters should be encoded:" + query, e); return query; } } return ""; } @Override public JPanel getPanel() { return paneParsed; } /** * Create a pane with three tables (request, params, headers) * * @return Pane to display request data */ private Component createRequestPane() { // Set up the 1st table Result with empty headers tableRequest = new JTable(requestModel); JMeterUtils.applyHiDPI(tableRequest); tableRequest.setToolTipText(JMeterUtils.getResString("textbox_tooltip_cell")); // $NON-NLS-1$ tableRequest.addMouseListener(new TextBoxDoubleClick(tableRequest)); setFirstColumnPreferredAndMaxWidth(tableRequest); RendererUtils.applyRenderers(tableRequest, RENDERERS_REQUEST); // Set up the 2nd table tableParams = new JTable(paramsModel); JMeterUtils.applyHiDPI(tableParams); tableParams.setToolTipText(JMeterUtils.getResString("textbox_tooltip_cell")); // $NON-NLS-1$ tableParams.addMouseListener(new TextBoxDoubleClick(tableParams)); TableColumn column = tableParams.getColumnModel().getColumn(0); column.setPreferredWidth(160); tableParams.getTableHeader().setDefaultRenderer(new HeaderAsPropertyRenderer()); RendererUtils.applyRenderers(tableParams, RENDERERS_PARAMS); // Set up the 3rd table tableHeaders = new JTable(headersModel); JMeterUtils.applyHiDPI(tableHeaders); tableHeaders.setToolTipText(JMeterUtils.getResString("textbox_tooltip_cell")); // $NON-NLS-1$ tableHeaders.addMouseListener(new TextBoxDoubleClick(tableHeaders)); setFirstColumnPreferredAndMaxWidth(tableHeaders); tableHeaders.getTableHeader().setDefaultRenderer( new HeaderAsPropertyRenderer()); RendererUtils.applyRenderers(tableHeaders, RENDERERS_HEADERS); // Create the split pane JSplitPane topSplit = 
new JSplitPane(JSplitPane.VERTICAL_SPLIT, GuiUtils.makeScrollPane(tableParams), GuiUtils.makeScrollPane(tableHeaders)); topSplit.setOneTouchExpandable(true); topSplit.setResizeWeight(0.50); // set split ratio topSplit.setBorder(null); // see bug jdk 4131528 JSplitPane paneParsed = new JSplitPane(JSplitPane.VERTICAL_SPLIT, GuiUtils.makeScrollPane(tableRequest), topSplit); paneParsed.setOneTouchExpandable(true); paneParsed.setResizeWeight(0.25); // set split ratio (only 5 lines to display) paneParsed.setBorder(null); // see bug jdk 4131528 // Hint to background color on bottom tabs (grey, not blue) JPanel panel = new JPanel(new BorderLayout()); panel.add(paneParsed); return panel; } private void setFirstColumnPreferredAndMaxWidth(JTable table) { TableColumn column = table.getColumnModel().getColumn(0); column.setMaxWidth(300); column.setPreferredWidth(160); } /* (non-Javadoc) * @see org.apache.jmeter.visualizers.request.RequestView#getLabel() */ @Override public String getLabel() { return JMeterUtils.getResString(KEY_LABEL); } /** * Search implementation for the http parameter table */ private class RequestViewHttpSearchProvider implements ISearchTextExtensionProvider { private int lastPosition = -1; @Override public void resetTextToFind() { lastPosition = -1; if(tableParams != null) { tableParams.clearSelection(); } } @Override public boolean executeAndShowTextFind(Pattern pattern) { boolean found = false; if(tableParams != null) { tableParams.clearSelection(); outerloop: for (int i = lastPosition+1; i < tableParams.getRowCount(); i++) { for (int j = 0; j < COLUMNS_PARAMS.length; j++) { Object o = tableParams.getModel().getValueAt(i, j); if(o instanceof String) { Matcher matcher = pattern.matcher((String) o); if (matcher.find()) { found = true; tableParams.setRowSelectionInterval(i, i); tableParams.scrollRectToVisible(tableParams.getCellRect(i, 0, true)); lastPosition = i; break outerloop; } } } } if(!found) { resetTextToFind(); } } return found; } } }
apache-2.0
ThoughtWorksStudios/mingle_git_plugin
tools/gems/gems/activerecord-jdbc-adapter-1.1.1/src/java/arjdbc/derby/DerbyModule.java
15045
/***** BEGIN LICENSE BLOCK ***** * Copyright (c) 2006-2007, 2010 Nick Sieger <[email protected]> * Copyright (c) 2006-2007 Ola Bini <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
***** END LICENSE BLOCK *****/ package arjdbc.derby; import java.sql.SQLException; import arjdbc.jdbc.RubyJdbcConnection; import org.jruby.Ruby; import org.jruby.RubyBigDecimal; import org.jruby.RubyBignum; import org.jruby.RubyBoolean; import org.jruby.RubyFixnum; import org.jruby.RubyFloat; import org.jruby.RubyModule; import org.jruby.RubyNumeric; import org.jruby.RubyObjectAdapter; import org.jruby.RubyRange; import org.jruby.RubyString; import org.jruby.anno.JRubyMethod; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; public class DerbyModule { private static RubyObjectAdapter rubyApi; public static void load(RubyModule arJdbc, RubyObjectAdapter adapter) { RubyModule derby = arJdbc.defineModuleUnder("Derby"); derby.defineAnnotatedMethods(DerbyModule.class); RubyModule column = derby.defineModuleUnder("Column"); column.defineAnnotatedMethods(Column.class); rubyApi = adapter; } public static class Column { @JRubyMethod(name = "type_cast", required = 1) public static IRubyObject type_cast(IRubyObject recv, IRubyObject value) { Ruby runtime = recv.getRuntime(); if (value.isNil() || ((value instanceof RubyString) && value.toString().trim().equalsIgnoreCase("null"))) { return runtime.getNil(); } String type = rubyApi.getInstanceVariable(recv, "@type").toString(); switch (type.charAt(0)) { case 's': //string return value; case 't': //text, timestamp, time if (type.equals("text")) { return value; } else if (type.equals("timestamp")) { return rubyApi.callMethod(recv.getMetaClass(), "string_to_time", value); } else { //time return rubyApi.callMethod(recv.getMetaClass(), "string_to_dummy_time", value); } case 'i': //integer case 'p': //primary key if (value.respondsTo("to_i")) { return rubyApi.callMethod(value, "to_i"); } else { return runtime.newFixnum(value.isTrue() ? 
1 : 0); } case 'd': //decimal, datetime, date if (type.equals("datetime")) { return rubyApi.callMethod(recv.getMetaClass(), "string_to_time", value); } else if (type.equals("date")) { return rubyApi.callMethod(recv.getMetaClass(), "string_to_date", value); } else { return rubyApi.callMethod(recv.getMetaClass(), "value_to_decimal", value); } case 'f': //float return rubyApi.callMethod(value, "to_f"); case 'b': //binary, boolean if (type.equals("binary")) { return rubyApi.callMethod(recv.getMetaClass(), "binary_to_string", value); } else { return rubyApi.callMethod(recv.getMetaClass(), "value_to_boolean", value); } } return value; } } @JRubyMethod(name = "quote", required = 1, optional = 1) public static IRubyObject quote(ThreadContext context, IRubyObject recv, IRubyObject[] args) { Ruby runtime = recv.getRuntime(); IRubyObject value = args[0]; if (args.length > 1) { IRubyObject col = args[1]; String type = rubyApi.callMethod(col, "type").toString(); // intercept and change value, maybe, if the column type is :text or :string if (type.equals("text") || type.equals("string")) { value = make_ruby_string_for_text_column(context, recv, runtime, value); } if (value instanceof RubyString) { if (type.equals("string")) { return quote_string_with_surround(runtime, "'", (RubyString)value, "'"); } else if (type.equals("text")) { return quote_string_with_surround(runtime, "CAST('", (RubyString)value, "' AS CLOB)"); } else if (type.equals("binary")) { return hexquote_string_with_surround(runtime, "CAST(X'", (RubyString)value, "' AS BLOB)"); } else { // column type :integer or other numeric or date version if (only_digits((RubyString)value)) { return value; } else { return super_quote(context, recv, runtime, value, col); } } } else if ((value instanceof RubyFloat) || (value instanceof RubyFixnum) || (value instanceof RubyBignum)) { if (type.equals("string")) { return quote_string_with_surround(runtime, "'", RubyString.objAsString(context, value), "'"); } } } return 
super_quote(context, recv, runtime, value, runtime.getNil()); } /* * Derby is not permissive like MySql. Try and send an Integer to a CLOB or VARCHAR column and Derby will vomit. * This method turns non stringy things into strings. */ private static IRubyObject make_ruby_string_for_text_column(ThreadContext context, IRubyObject recv, Ruby runtime, IRubyObject value) { RubyModule multibyteChars = (RubyModule) ((RubyModule) ((RubyModule) runtime.getModule("ActiveSupport")).getConstant("Multibyte")).getConstantAt("Chars"); if (value instanceof RubyString || rubyApi.isKindOf(value, multibyteChars) || value.isNil()) { return value; } if (value instanceof RubyBoolean) { return value.isTrue() ? runtime.newString("1") : runtime.newString("0"); } else if (value instanceof RubyFloat || value instanceof RubyFixnum || value instanceof RubyBignum) { return RubyString.objAsString(context, value); } else if ( value instanceof RubyBigDecimal) { return rubyApi.callMethod(value, "to_s", runtime.newString("F")); } else { if (rubyApi.callMethod(value, "acts_like?", runtime.newString("date")).isTrue() || rubyApi.callMethod(value, "acts_like?", runtime.newString("time")).isTrue()) { return (RubyString)rubyApi.callMethod(recv, "quoted_date", value); } else { return (RubyString)rubyApi.callMethod(value, "to_yaml"); } } } private final static ByteList NULL = new ByteList("NULL".getBytes()); private static IRubyObject super_quote(ThreadContext context, IRubyObject recv, Ruby runtime, IRubyObject value, IRubyObject col) { if (value.respondsTo("quoted_id")) { return rubyApi.callMethod(value, "quoted_id"); } IRubyObject type = (col.isNil()) ? 
col : rubyApi.callMethod(col, "type"); RubyModule multibyteChars = (RubyModule) ((RubyModule) ((RubyModule) runtime.getModule("ActiveSupport")).getConstant("Multibyte")).getConstantAt("Chars"); if (value instanceof RubyString || rubyApi.isKindOf(value, multibyteChars)) { RubyString svalue = RubyString.objAsString(context, value); if (type == runtime.newSymbol("binary") && col.getType().respondsTo("string_to_binary")) { return quote_string_with_surround(runtime, "'", (RubyString)(rubyApi.callMethod(col.getType(), "string_to_binary", svalue)), "'"); } else if (type == runtime.newSymbol("integer") || type == runtime.newSymbol("float")) { return RubyString.objAsString(context, ((type == runtime.newSymbol("integer")) ? rubyApi.callMethod(svalue, "to_i") : rubyApi.callMethod(svalue, "to_f"))); } else { return quote_string_with_surround(runtime, "'", svalue, "'"); } } else if (value.isNil()) { return runtime.newString(NULL); } else if (value instanceof RubyBoolean) { return (value.isTrue() ? (type == runtime.newSymbol(":integer")) ? runtime.newString("1") : rubyApi.callMethod(recv, "quoted_true") : (type == runtime.newSymbol(":integer")) ? 
runtime.newString("0") : rubyApi.callMethod(recv, "quoted_false")); } else if((value instanceof RubyFloat) || (value instanceof RubyFixnum) || (value instanceof RubyBignum)) { return RubyString.objAsString(context, value); } else if(value instanceof RubyBigDecimal) { return rubyApi.callMethod(value, "to_s", runtime.newString("F")); } else if (rubyApi.callMethod(value, "acts_like?", runtime.newString("date")).isTrue() || rubyApi.callMethod(value, "acts_like?", runtime.newString("time")).isTrue()) { return quote_string_with_surround(runtime, "'", (RubyString)(rubyApi.callMethod(recv, "quoted_date", value)), "'"); } else { return quote_string_with_surround(runtime, "'", (RubyString)(rubyApi.callMethod(value, "to_yaml")), "'"); } } private final static ByteList TWO_SINGLE = new ByteList(new byte[]{'\'','\''}); private static IRubyObject quote_string_with_surround(Ruby runtime, String before, RubyString string, String after) { ByteList input = string.getByteList(); ByteList output = new ByteList(before.getBytes()); for(int i = input.begin; i< input.begin + input.realSize; i++) { switch(input.bytes[i]) { case '\'': output.append(input.bytes[i]); //FALLTHROUGH default: output.append(input.bytes[i]); } } output.append(after.getBytes()); return runtime.newString(output); } private final static byte[] HEX = {'0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F'}; private static IRubyObject hexquote_string_with_surround(Ruby runtime, String before, RubyString string, String after) { ByteList input = string.getByteList(); ByteList output = new ByteList(before.getBytes()); int written = 0; for(int i = input.begin; i< input.begin + input.realSize; i++) { byte b1 = input.bytes[i]; byte higher = HEX[(((char)b1)>>4)%16]; byte lower = HEX[((char)b1)%16]; output.append(higher); output.append(lower); written += 2; if(written >= 16334) { // max hex length = 16334 output.append("'||X'".getBytes()); written = 0; } } output.append(after.getBytes()); return 
runtime.newStringShared(output); } private static boolean only_digits(RubyString inp) { ByteList input = inp.getByteList(); for(int i = input.begin; i< input.begin + input.realSize; i++) { if(input.bytes[i] < '0' || input.bytes[i] > '9') { return false; } } return true; } @JRubyMethod(name = "quote_string", required = 1) public static IRubyObject quote_string(IRubyObject recv, IRubyObject string) { boolean replacementFound = false; ByteList bl = ((RubyString) string).getByteList(); for(int i = bl.begin; i < bl.begin + bl.realSize; i++) { switch (bl.bytes[i]) { case '\'': break; default: continue; } // On first replacement allocate a different bytelist so we don't manip original if(!replacementFound) { i-= bl.begin; bl = new ByteList(bl); replacementFound = true; } bl.replace(i, 1, TWO_SINGLE); i+=1; } if(replacementFound) { return recv.getRuntime().newStringShared(bl); } else { return string; } } @JRubyMethod(name = "select_all", rest = true) public static IRubyObject select_all(IRubyObject recv, IRubyObject[] args) { return rubyApi.callMethod(recv, "execute", args); } @JRubyMethod(name = "select_one", rest = true) public static IRubyObject select_one(IRubyObject recv, IRubyObject[] args) { IRubyObject limit = rubyApi.getInstanceVariable(recv, "@limit"); if (limit == null || limit.isNil()) { rubyApi.setInstanceVariable(recv, "@limit", recv.getRuntime().newFixnum(1)); } try { IRubyObject result = rubyApi.callMethod(recv, "execute", args); return rubyApi.callMethod(result, "first"); } finally { rubyApi.setInstanceVariable(recv, "@limit", recv.getRuntime().getNil()); } } @JRubyMethod(name = "_execute", required = 1, optional = 1) public static IRubyObject _execute(ThreadContext context, IRubyObject recv, IRubyObject[] args) throws SQLException, java.io.IOException { Ruby runtime = recv.getRuntime(); RubyJdbcConnection conn = (RubyJdbcConnection) rubyApi.getInstanceVariable(recv, "@connection"); String sql = args[0].toString().trim().toLowerCase(); if (sql.charAt(0) == 
'(') { sql = sql.substring(1).trim(); } if (sql.startsWith("insert")) { return conn.execute_insert(context, args[0]); } else if (sql.startsWith("select") || sql.startsWith("show") || sql.startsWith("values")) { return conn.execute_query(context, args[0]); } else { return conn.execute_update(context, args[0]); } } }
apache-2.0
zephyrproject-rtos/zephyr
scripts/west_commands/runners/canopen_program.py
13074
# Copyright (c) 2020 Vestas Wind Systems A/S
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for performing program download over CANopen (DSP 302-3).'''

import argparse
import os
import time

from runners.core import ZephyrBinaryRunner, RunnerCaps

try:
    import canopen
    from progress.bar import Bar
    MISSING_REQUIREMENTS = False
except ImportError:
    MISSING_REQUIREMENTS = True

# Default Python-CAN context to use, see python-can documentation for details
DEFAULT_CAN_CONTEXT = 'default'

# Default program number
DEFAULT_PROGRAM_NUMBER = 1

# Default timeouts and retries
DEFAULT_TIMEOUT = 10.0 # seconds
DEFAULT_SDO_TIMEOUT = 0.3 # seconds
DEFAULT_SDO_RETRIES = 1

# Object dictionary indexes
H1F50_PROGRAM_DATA = 0x1F50
H1F51_PROGRAM_CTRL = 0x1F51
H1F56_PROGRAM_SWID = 0x1F56
H1F57_FLASH_STATUS = 0x1F57

# Program control commands
PROGRAM_CTRL_STOP = 0x00
PROGRAM_CTRL_START = 0x01
PROGRAM_CTRL_RESET = 0x02
PROGRAM_CTRL_CLEAR = 0x03
PROGRAM_CTRL_ZEPHYR_CONFIRM = 0x80

class ToggleAction(argparse.Action):
    '''Toggle argument parser: --foo sets dest True, --no-foo sets it False.'''
    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, not option_string.startswith('--no-'))

class CANopenBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for CANopen.'''
    def __init__(self, cfg, dev_id, can_context=DEFAULT_CAN_CONTEXT,
                 program_number=DEFAULT_PROGRAM_NUMBER, confirm=True,
                 confirm_only=True, timeout=DEFAULT_TIMEOUT,
                 sdo_retries=DEFAULT_SDO_RETRIES, sdo_timeout=DEFAULT_SDO_TIMEOUT):
        if MISSING_REQUIREMENTS:
            raise RuntimeError('one or more Python dependencies were missing; '
                               "see the getting started guide for details on "
                               "how to fix")
        super().__init__(cfg)
        self.dev_id = dev_id # Only use for error checking in do_run()
        self.bin_file = cfg.bin_file
        self.confirm = confirm
        self.confirm_only = confirm_only
        self.timeout = timeout
        self.downloader = CANopenProgramDownloader(logger=self.logger,
                                                   node_id=dev_id,
                                                   can_context=can_context,
                                                   program_number=program_number,
                                                   sdo_retries=sdo_retries,
                                                   sdo_timeout=sdo_timeout)

    @classmethod
    def name(cls):
        return 'canopen'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'}, dev_id=True, flash_addr=False)

    @classmethod
    def dev_id_help(cls) -> str:
        return 'CANopen Node ID.'

    @classmethod
    def do_add_parser(cls, parser):
        # Optional:
        parser.add_argument('--node-id', dest='dev_id', help=cls.dev_id_help())
        parser.add_argument('--can-context', default=DEFAULT_CAN_CONTEXT,
                            help=f'Python-CAN context to use (default: {DEFAULT_CAN_CONTEXT})')
        parser.add_argument('--program-number', type=int, default=DEFAULT_PROGRAM_NUMBER,
                            help=f'program number (default: {DEFAULT_PROGRAM_NUMBER})')
        parser.add_argument('--confirm', '--no-confirm',
                            dest='confirm', nargs=0,
                            action=ToggleAction,
                            help='confirm after starting? (default: yes)')
        parser.add_argument('--confirm-only', default=False, action='store_true',
                            help='confirm only, no program download (default: no)')
        parser.add_argument('--timeout', type=float, default=DEFAULT_TIMEOUT,
                            help=f'Timeout in seconds (default: {DEFAULT_TIMEOUT})')
        parser.add_argument('--sdo-retries', type=int, default=DEFAULT_SDO_RETRIES,
                            help=f'CANopen SDO request retries (default: {DEFAULT_SDO_RETRIES})')
        parser.add_argument('--sdo-timeout', type=float, default=DEFAULT_SDO_TIMEOUT,
                            help=f'''CANopen SDO response timeout in seconds
                            (default: {DEFAULT_SDO_TIMEOUT})''')

        parser.set_defaults(confirm=True)

    @classmethod
    def do_create(cls, cfg, args):
        return CANopenBinaryRunner(cfg, int(args.dev_id),
                                   can_context=args.can_context,
                                   program_number=args.program_number,
                                   confirm=args.confirm,
                                   confirm_only=args.confirm_only,
                                   timeout=args.timeout,
                                   sdo_retries=args.sdo_retries,
                                   sdo_timeout=args.sdo_timeout)

    def do_run(self, command, **kwargs):
        if not self.dev_id:
            raise RuntimeError('Please specify a CANopen node ID with the '
                               '-i/--dev-id or --node-id command-line switch.')
        if command == 'flash':
            self.flash(**kwargs)

    def flash(self, **kwargs):
        '''Download program to flash over CANopen'''
        self.ensure_output('bin')
        self.logger.info('Using Node ID %d, program number %d',
                         self.downloader.node_id,
                         self.downloader.program_number)

        self.downloader.connect()
        status = self.downloader.wait_for_flash_status_ok(self.timeout)
        if status == 0:
            self.downloader.swid()
        else:
            self.logger.warning('Flash status 0x{:02x}, '
                                'skipping software identification'.format(status))

        self.downloader.enter_pre_operational()

        if self.confirm_only:
            # Confirm an already-downloaded program and bail out early.
            self.downloader.zephyr_confirm_program()
            self.downloader.disconnect()
            return

        if self.bin_file is None:
            raise ValueError('Cannot download program; bin_file is missing')

        self.downloader.stop_program()
        self.downloader.clear_program()
        self.downloader.wait_for_flash_status_ok(self.timeout)
        self.downloader.download(self.bin_file)
        status = self.downloader.wait_for_flash_status_ok(self.timeout)
        if status != 0:
            raise ValueError('Program download failed: '
                             'flash status 0x{:02x}'.format(status))

        self.downloader.swid()
        self.downloader.start_program()
        self.downloader.wait_for_bootup(self.timeout)
        self.downloader.swid()

        if self.confirm:
            self.downloader.enter_pre_operational()
            self.downloader.zephyr_confirm_program()

        self.downloader.disconnect()

class CANopenProgramDownloader(object):
    '''CANopen program downloader'''
    def __init__(self, logger, node_id, can_context=DEFAULT_CAN_CONTEXT,
                 program_number=DEFAULT_PROGRAM_NUMBER,
                 sdo_retries=DEFAULT_SDO_RETRIES, sdo_timeout=DEFAULT_SDO_TIMEOUT):
        super(CANopenProgramDownloader, self).__init__()
        self.logger = logger
        self.node_id = node_id
        self.can_context = can_context
        self.program_number = program_number
        self.network = canopen.Network()
        self.node = self.network.add_node(self.node_id,
                                          self.create_object_dictionary())
        # Per-subindex SDO accessors for the program-download OD entries.
        self.data_sdo = self.node.sdo[H1F50_PROGRAM_DATA][self.program_number]
        self.ctrl_sdo = self.node.sdo[H1F51_PROGRAM_CTRL][self.program_number]
        self.swid_sdo = self.node.sdo[H1F56_PROGRAM_SWID][self.program_number]
        self.flash_sdo = self.node.sdo[H1F57_FLASH_STATUS][self.program_number]

        self.node.sdo.MAX_RETRIES = sdo_retries
        self.node.sdo.RESPONSE_TIMEOUT = sdo_timeout

    def connect(self):
        '''Connect to CAN network'''
        try:
            self.network.connect(context=self.can_context)
        except:
            raise ValueError('Unable to connect to CAN network')

    def disconnect(self):
        '''Disconnect from CAN network'''
        self.network.disconnect()

    def enter_pre_operational(self):
        '''Enter pre-operational NMT state'''
        self.logger.info("Entering pre-operational mode")
        try:
            self.node.nmt.state = 'PRE-OPERATIONAL'
        except:
            raise ValueError('Failed to enter pre-operational mode')

    def _ctrl_program(self, cmd):
        '''Write program control command to CANopen object dictionary (0x1f51)'''
        try:
            self.ctrl_sdo.raw = cmd
        except:
            raise ValueError('Unable to write control command 0x{:02x}'.format(cmd))

    def stop_program(self):
        '''Write stop control command to CANopen object dictionary (0x1f51)'''
        self.logger.info('Stopping program')
        self._ctrl_program(PROGRAM_CTRL_STOP)

    def start_program(self):
        '''Write start control command to CANopen object dictionary (0x1f51)'''
        self.logger.info('Starting program')
        self._ctrl_program(PROGRAM_CTRL_START)

    def clear_program(self):
        '''Write clear control command to CANopen object dictionary (0x1f51)'''
        self.logger.info('Clearing program')
        self._ctrl_program(PROGRAM_CTRL_CLEAR)

    def zephyr_confirm_program(self):
        '''Write confirm control command to CANopen object dictionary (0x1f51)'''
        self.logger.info('Confirming program')
        self._ctrl_program(PROGRAM_CTRL_ZEPHYR_CONFIRM)

    def swid(self):
        '''Read software identification from CANopen object dictionary (0x1f56)'''
        try:
            swid = self.swid_sdo.raw
        except:
            raise ValueError('Failed to read software identification')
        self.logger.info('Program software identification: 0x{:08x}'.format(swid))
        return swid

    def flash_status(self):
        '''Read flash status identification'''
        try:
            status = self.flash_sdo.raw
        except:
            raise ValueError('Failed to read flash status identification')
        return status

    def download(self, bin_file):
        '''Download program to CANopen object dictionary (0x1f50)

        FIX: the previous implementation referenced progress/infile/outfile
        unconditionally in the finally block; if os.path.getsize() or either
        open() raised, the cleanup itself raised NameError and masked the real
        error. Resources are now initialized to None and only cleaned up when
        they were actually created, and the original exception is chained.
        '''
        self.logger.info('Downloading program: %s', bin_file)
        infile = None
        outfile = None
        progress = None
        try:
            size = os.path.getsize(bin_file)
            infile = open(bin_file, 'rb')
            outfile = self.data_sdo.open('wb', size=size)
            progress = Bar('%(percent)d%%', max=size, suffix='%(index)d/%(max)dB')
            while True:
                chunk = infile.read(1024)
                if not chunk:
                    break
                outfile.write(chunk)
                progress.next(n=len(chunk))
        except Exception as err:
            raise ValueError('Failed to download program') from err
        finally:
            if progress is not None:
                progress.finish()
            if infile is not None:
                infile.close()
            if outfile is not None:
                outfile.close()

    def wait_for_bootup(self, timeout=DEFAULT_TIMEOUT):
        '''Wait for boot-up message reception'''
        self.logger.info('Waiting for boot-up message...')
        try:
            self.node.nmt.wait_for_bootup(timeout=timeout)
        except:
            raise ValueError('Timeout waiting for boot-up message')

    def wait_for_flash_status_ok(self, timeout=DEFAULT_TIMEOUT):
        '''Wait for flash status ok'''
        self.logger.info('Waiting for flash status ok')
        end_time = time.time() + timeout
        while True:
            # Each flash_status() call performs a (rate-limited) SDO exchange,
            # so this loop is paced by the SDO response timeout.
            now = time.time()
            status = self.flash_status()
            if status == 0:
                break

            if now > end_time:
                return status

        return status

    @staticmethod
    def create_object_dictionary():
        '''Create a synthetic CANopen object dictionary for program download'''
        objdict = canopen.objectdictionary.ObjectDictionary()

        array = canopen.objectdictionary.Array('Program data', 0x1f50)
        member = canopen.objectdictionary.Variable('', 0x1f50, subindex=1)
        member.data_type = canopen.objectdictionary.DOMAIN
        array.add_member(member)
        objdict.add_object(array)

        array = canopen.objectdictionary.Array('Program control', 0x1f51)
        member = canopen.objectdictionary.Variable('', 0x1f51, subindex=1)
        member.data_type = canopen.objectdictionary.UNSIGNED8
        array.add_member(member)
        objdict.add_object(array)

        # Entry name typo ('sofware') kept as-is: it is a runtime label.
        array = canopen.objectdictionary.Array('Program sofware ID', 0x1f56)
        member = canopen.objectdictionary.Variable('', 0x1f56, subindex=1)
        member.data_type = canopen.objectdictionary.UNSIGNED32
        array.add_member(member)
        objdict.add_object(array)

        array = canopen.objectdictionary.Array('Flash error ID', 0x1f57)
        member = canopen.objectdictionary.Variable('', 0x1f57, subindex=1)
        member.data_type = canopen.objectdictionary.UNSIGNED32
        array.add_member(member)
        objdict.add_object(array)

        return objdict
apache-2.0
dlyle65535/metron
metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/GetProfile.java
10633
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.apache.metron.profiler.client.stellar;

import static org.apache.metron.profiler.client.stellar.ProfilerClientConfig.PROFILER_COLUMN_FAMILY;
import static org.apache.metron.profiler.client.stellar.ProfilerClientConfig.PROFILER_HBASE_TABLE;
import static org.apache.metron.profiler.client.stellar.ProfilerClientConfig.PROFILER_HBASE_TABLE_PROVIDER;
import static org.apache.metron.profiler.client.stellar.ProfilerClientConfig.PROFILER_PERIOD;
import static org.apache.metron.profiler.client.stellar.ProfilerClientConfig.PROFILER_PERIOD_UNITS;
import static org.apache.metron.profiler.client.stellar.ProfilerClientConfig.PROFILER_SALT_DIVISOR;
import static org.apache.metron.profiler.client.stellar.Util.getArg;
import static org.apache.metron.profiler.client.stellar.Util.getEffectiveConfig;

import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.metron.stellar.dsl.Context;
import org.apache.metron.stellar.dsl.ParseException;
import org.apache.metron.stellar.dsl.Stellar;
import org.apache.metron.stellar.dsl.StellarFunction;
import org.apache.metron.hbase.HTableProvider;
import org.apache.metron.hbase.TableProvider;
import org.apache.metron.profiler.ProfilePeriod;
import org.apache.metron.profiler.client.HBaseProfilerClient;
import org.apache.metron.profiler.client.ProfilerClient;
import org.apache.metron.profiler.hbase.ColumnBuilder;
import org.apache.metron.profiler.hbase.RowKeyBuilder;
import org.apache.metron.profiler.hbase.SaltyRowKeyBuilder;
import org.apache.metron.profiler.hbase.ValueOnlyColumnBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A Stellar function that can retrieve data contained within a Profile.
 *
 *  PROFILE_GET
 *
 * Retrieve all values for 'entity1' from 'profile1' over the past 4 hours.
 *
 *   <code>PROFILE_GET('profile1', 'entity1', 4, 'HOURS')</code>
 *
 * Retrieve all values for 'entity1' from 'profile1' over the past 2 days.
 *
 *   <code>PROFILE_GET('profile1', 'entity1', 2, 'DAYS')</code>
 *
 * Retrieve all values for 'entity1' from 'profile1' that occurred on 'weekdays' over the past month.
 *
 *   <code>PROFILE_GET('profile1', 'entity1', 1, 'MONTHS', ['weekdays'])</code>
 *
 * Retrieve all values for 'entity1' from 'profile1' over the past 2 days, with no 'groupBy',
 * and overriding the usual global client configuration parameters for window duration.
 *
 *   <code>PROFILE_GET('profile1', 'entity1', 2, 'DAYS', [], {'profiler.client.period.duration' : '2', 'profiler.client.period.duration.units' : 'MINUTES'})</code>
 *
 * Retrieve all values for 'entity1' from 'profile1' that occurred on 'weekdays' over the past month,
 * overriding the usual global client configuration parameters for window duration.
 *
 *   <code>PROFILE_GET('profile1', 'entity1', 1, 'MONTHS', ['weekdays'], {'profiler.client.period.duration' : '2', 'profiler.client.period.duration.units' : 'MINUTES'})</code>
 *
 */
@Stellar(
        namespace="PROFILE",
        name="GET",
        description="Retrieves a series of values from a stored profile.",
        params={
          "profile - The name of the profile.",
          "entity - The name of the entity.",
          "periods - The list of profile periods to grab. These are ProfilePeriod objects.",
          "groups_list - Optional, must correspond to the 'groupBy' list used in profile creation - List (in square brackets) of "+
                  "groupBy values used to filter the profile. Default is the "+
                  "empty list, meaning groupBy was not used when creating the profile.",
          "config_overrides - Optional - Map (in curly braces) of name:value pairs, each overriding the global config parameter "+
                  "of the same name. Default is the empty Map, meaning no overrides."
        },
        returns="The selected profile measurements."
)
public class GetProfile implements StellarFunction {

  /**
   * Cached client that can retrieve profile values.
   */
  // NOTE(review): this cache (and cachedConfigMap below) is mutated without
  // synchronization in apply(); confirm the Stellar executor invokes a
  // function instance from a single thread before relying on that.
  private ProfilerClient client;

  /**
   * Cached value of config map actually used to construct the previously cached client.
   */
  private Map<String, Object> cachedConfigMap = new HashMap<String, Object>(6);

  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  /**
   * Initialization.  No longer need to do anything in initialization,
   * as all setup is done lazily and cached.
   */
  @Override
  public void initialize(Context context) {
  }

  /**
   * Is the function initialized?
   */
  @Override
  public boolean isInitialized() {
    return true;
  }

  /**
   * Apply the function.
   * @param args The function arguments: profile, entity, periods,
   *             then optionally groups_list and config_overrides
   *             (or, deprecated, trailing varargs group names).
   * @param context
   * @return The profile measurements fetched from HBase.
   * @throws ParseException if a positional argument has the wrong type.
   */
  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {

    String profile = getArg(0, String.class, args);
    String entity = getArg(1, String.class, args);
    Optional<List<ProfilePeriod>> periods = Optional.ofNullable(getArg(2, List.class, args));

    //Optional arguments
    @SuppressWarnings("unchecked")
    List<Object> groups = null;
    Map configOverridesMap = null;
    if (args.size() < 4) {
      // no optional args, so default 'groups' and configOverridesMap remains null.
      groups = new ArrayList<>(0);
    } else if (args.get(3) instanceof List) {
      // correct extensible usage
      groups = getArg(3, List.class, args);
      if (args.size() >= 5) {
        configOverridesMap = getArg(4, Map.class, args);
        // an empty overrides map is treated the same as no overrides at all.
        if (configOverridesMap.isEmpty()) configOverridesMap = null;
      }
    } else {
      // Deprecated "varargs" style usage for groups_list
      // configOverridesMap cannot be specified so it remains null.
      groups = getGroupsArg(3, args);
    }

    Map<String, Object> effectiveConfig = getEffectiveConfig(context, configOverridesMap);

    //lazily create new profiler client if needed
    // (a config change since the last call invalidates the cached client)
    if (client == null || !cachedConfigMap.equals(effectiveConfig)) {
      RowKeyBuilder rowKeyBuilder = getRowKeyBuilder(effectiveConfig);
      ColumnBuilder columnBuilder = getColumnBuilder(effectiveConfig);
      HTableInterface table = getTable(effectiveConfig);
      client = new HBaseProfilerClient(table, rowKeyBuilder, columnBuilder);
      cachedConfigMap = effectiveConfig;
    }

    return client.fetch(Object.class, profile, entity, groups, periods.orElse(new ArrayList<>(0)));
  }

  /**
   * Get the groups defined by the user.
   *
   * The user can specify 0 or more groups.  All arguments from the specified position
   * on are assumed to be groups.  If there is no argument in the specified position,
   * then it is assumed the user did not specify any groups.
   *
   * @param startIndex The starting index of groups within the function argument list.
   * @param args The function arguments.
   * @return The groups.
   */
  private List<Object> getGroupsArg(int startIndex, List<Object> args) {
    List<Object> groups = new ArrayList<>();

    for(int i=startIndex; i<args.size(); i++) {
      String group = getArg(i, String.class, args);
      groups.add(group);
    }

    return groups;
  }

  /**
   * Creates the ColumnBuilder to use in accessing the profile data.
   * @param global The global configuration.
   */
  private ColumnBuilder getColumnBuilder(Map<String, Object> global) {
    ColumnBuilder columnBuilder;

    String columnFamily = PROFILER_COLUMN_FAMILY.get(global, String.class);
    columnBuilder = new ValueOnlyColumnBuilder(columnFamily);

    return columnBuilder;
  }

  /**
   * Creates the ColumnBuilder to use in accessing the profile data.
   * @param global The global configuration.
   */
  private RowKeyBuilder getRowKeyBuilder(Map<String, Object> global) {

    // how long is the profile period?
    long duration = PROFILER_PERIOD.get(global, Long.class);
    LOG.debug("profiler client: {}={}", PROFILER_PERIOD, duration);

    // which units are used to define the profile period?
    String configuredUnits = PROFILER_PERIOD_UNITS.get(global, String.class);
    TimeUnit units = TimeUnit.valueOf(configuredUnits);
    LOG.debug("profiler client: {}={}", PROFILER_PERIOD_UNITS, units);

    // what is the salt divisor?
    Integer saltDivisor = PROFILER_SALT_DIVISOR.get(global, Integer.class);
    LOG.debug("profiler client: {}={}", PROFILER_SALT_DIVISOR, saltDivisor);

    return new SaltyRowKeyBuilder(saltDivisor, duration, units);
  }

  /**
   * Create an HBase table used when accessing HBase.
   * @param global The global configuration.
   * @return
   * @throws IllegalArgumentException if the table cannot be accessed.
   */
  private HTableInterface getTable(Map<String, Object> global) {

    String tableName = PROFILER_HBASE_TABLE.get(global, String.class);
    TableProvider provider = getTableProvider(global);

    try {
      return provider.getTable(HBaseConfiguration.create(), tableName);

    } catch (IOException e) {
      throw new IllegalArgumentException(String.format("Unable to access table: %s", tableName), e);
    }
  }

  /**
   * Create the TableProvider to use when accessing HBase.
   * @param global The global configuration.
   */
  private TableProvider getTableProvider(Map<String, Object> global) {
    String clazzName = PROFILER_HBASE_TABLE_PROVIDER.get(global, String.class);

    TableProvider provider;
    try {
      @SuppressWarnings("unchecked")
      Class<? extends TableProvider> clazz = (Class<? extends TableProvider>) Class.forName(clazzName);
      provider = clazz.getConstructor().newInstance();

    } catch (Exception e) {
      // NOTE(review): any failure (bad class name, wrong type, ctor error)
      // silently falls back to HTableProvider — consider logging the cause.
      provider = new HTableProvider();
    }

    return provider;
  }
}
apache-2.0
hbeatty/incubator-trafficcontrol
traffic_portal/app/src/common/api/TypeService.js
3066
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ var TypeService = function($http, ENV, locationUtils, messageModel) { this.getTypes = function(queryParams) { return $http.get(ENV.api['root'] + 'types', {params: queryParams}).then( function (result) { return result.data.response; }, function (err) { throw err; } ) }; this.getType = function(id) { return $http.get(ENV.api['root'] + 'types', {params: {id: id}}).then( function (result) { return result.data.response[0]; }, function (err) { throw err; } ) }; this.createType = function(type) { return $http.post(ENV.api['root'] + 'types', type).then( function(result) { messageModel.setMessages([ { level: 'success', text: 'Type created' } ], true); locationUtils.navigateToPath('/types'); return result; }, function(err) { messageModel.setMessages(err.data.alerts, false); throw err; } ); }; // todo: change to use query param when it is supported this.updateType = function(type) { return $http.put(ENV.api['root'] + 'types/' + type.id, type).then( function(result) { messageModel.setMessages([ { level: 'success', text: 'Type updated' } ], false); return result; }, function(err) { messageModel.setMessages(err.data.alerts, false); throw err; } ); }; // todo: change to use query param when it is supported 
this.deleteType = function(id) { return $http.delete(ENV.api['root'] + "types/" + id).then( function(result) { messageModel.setMessages([ { level: 'success', text: 'Type deleted' } ], true); return result; }, function(err) { messageModel.setMessages(err.data.alerts, true); throw err; } ); }; }; TypeService.$inject = ['$http', 'ENV', 'locationUtils', 'messageModel']; module.exports = TypeService;
apache-2.0
apache/qpid-jms
qpid-jms-client/src/test/java/org/apache/qpid/jms/producer/JmsQueueSenderTest.java
5048
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.qpid.jms.producer;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;

import javax.jms.Destination;
import javax.jms.InvalidDestinationException;
import javax.jms.Message;
import javax.jms.Queue;
import javax.jms.QueueSender;
import javax.jms.Session;

import org.apache.qpid.jms.JmsConnectionTestSupport;
import org.apache.qpid.jms.JmsQueueSession;
import org.apache.qpid.jms.message.JmsOutboundMessageDispatch;
import org.apache.qpid.jms.provider.mock.MockRemotePeer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for the JMS 1.1 {@link QueueSender} behavior of the client, run
 * against an in-memory {@link MockRemotePeer} rather than a real broker.
 * Covers repeated close, getQueue, sending via an anonymous (null-queue)
 * sender, and the InvalidDestinationException cases mandated by the spec.
 */
public class JmsQueueSenderTest extends JmsConnectionTestSupport {

    private JmsQueueSession session;
    private final MockRemotePeer remotePeer = new MockRemotePeer();

    /** Starts the mock peer and opens a non-transacted AUTO_ACKNOWLEDGE queue session. */
    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        remotePeer.start();
        connection = createConnectionToMockProvider();
        session = (JmsQueueSession) connection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);
    }

    /** Terminates the mock peer; try/finally ensures the base teardown always runs. */
    @Override
    @After
    public void tearDown() throws Exception {
        try {
            remotePeer.terminate();
        } finally {
            super.tearDown();
        }
    }

    /** close() must be idempotent: a second call may not throw. */
    @Test(timeout = 10000)
    public void testMultipleCloseCallsNoErrors() throws Exception {
        Queue queue = session.createQueue(getTestName());
        QueueSender sender = session.createSender(queue);
        sender.close();
        sender.close();
    }

    /** getQueue() must return the exact Queue instance the sender was created with. */
    @Test(timeout = 10000)
    public void testGetQueue() throws Exception {
        Queue queue = session.createQueue(getTestName());
        QueueSender sender = session.createSender(queue);
        assertSame(queue, sender.getQueue());
    }

    /**
     * An anonymous sender (created with a null queue) must accept a
     * per-send destination; the dispatched message carries that queue
     * as its JMSDestination.
     */
    @Test(timeout = 10000)
    public void testSendToQueueWithNullOnExplicitQueueSender() throws Exception {
        Queue queue = session.createQueue(getTestName());
        QueueSender sender = session.createSender(null);
        Message message = session.createMessage();
        sender.send(queue, message);

        JmsOutboundMessageDispatch envelope = remotePeer.getLastReceivedMessage();
        assertNotNull(envelope);
        message = envelope.getMessage();
        Destination destination = message.getJMSDestination();
        assertEquals(queue, destination);
    }

    /** Same as above but via the overload taking delivery mode, priority and TTL. */
    @Test(timeout = 10000)
    public void testSendToQueueWithDeliveryOptsWithNullOnExplicitQueueSender() throws Exception {
        Queue queue = session.createQueue(getTestName());
        QueueSender sender = session.createSender(null);
        Message message = session.createMessage();
        sender.send(queue, message, Message.DEFAULT_DELIVERY_MODE, Message.DEFAULT_PRIORITY, Message.DEFAULT_TIME_TO_LIVE);

        JmsOutboundMessageDispatch envelope = remotePeer.getLastReceivedMessage();
        assertNotNull(envelope);
        message = envelope.getMessage();
        Destination destination = message.getJMSDestination();
        assertEquals(queue, destination);
    }

    /**
     * A sender with an explicit queue must reject a null per-send
     * destination with InvalidDestinationException.
     */
    @Test(timeout = 10000)
    public void testSendToQueueWithNullOnExplicitQueueSenderThrowsInvalidDestinationException() throws Exception {
        Queue queue = session.createQueue(getTestName());
        QueueSender sender = session.createSender(queue);
        Message message = session.createMessage();

        try {
            sender.send((Queue) null, message);
            fail("Expected exception to be thrown");
        } catch (InvalidDestinationException ide) {
            // expected
        }
    }

    /** Same rejection, via the overload taking delivery mode, priority and TTL. */
    @Test(timeout = 10000)
    public void testSendToQueueWithDeliveryOptsWithNullOnExplicitQueueSenderThrowsInvalidDestinationException() throws Exception {
        Queue queue = session.createQueue(getTestName());
        QueueSender sender = session.createSender(queue);
        Message message = session.createMessage();

        try {
            sender.send((Queue) null, message, Message.DEFAULT_DELIVERY_MODE, Message.DEFAULT_PRIORITY, Message.DEFAULT_TIME_TO_LIVE);
            fail("Expected exception to be thrown");
        } catch (InvalidDestinationException ide) {
            // expected
        }
    }
}
apache-2.0
basheersubei/swift-t
lb/code/src/refcount.h
1003
/* Helper functions to manipulate refcounts from a server */ #ifndef __XLB_REFCOUNT_H #define __XLB_REFCOUNT_H #include "adlb-defs.h" #include "data.h" #include "data_internal.h" #include "notifications.h" /* Decrement reference count of given id. Must be called on a server wait: if true, wait until refcount is confirmed. */ adlb_data_code xlb_incr_refc_svr(adlb_datum_id id, adlb_refc change, adlb_notif_t *notifs, bool wait); /* Modify reference count of locally stored datum. Send any consequent notifications or messages. suppress_errors: if true, just log any errors */ adlb_data_code xlb_incr_refc_local(adlb_datum_id id, adlb_refc change, bool suppress_errors); /* Modify refcount of referenced items */ adlb_data_code xlb_incr_referand(adlb_datum_storage *d, adlb_data_type type, bool release_read, bool release_write, xlb_refc_acquire to_acquire, xlb_refc_changes *changes); #endif // __XLB_REFCOUNT_H
apache-2.0
redforks/closure-library
closure/goog/soy/data_test.js
1495
// Copyright 2014 The Closure Library Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. goog.provide('goog.soy.dataTest'); goog.setTestOnly('goog.soy.dataTest'); goog.require('goog.html.SafeHtml'); goog.require('goog.html.SafeUrl'); /** @suppress {extraRequire} */ goog.require('goog.soy.testHelper'); goog.require('goog.testing.jsunit'); function testToSafeHtml() { var html; html = example.unsanitizedTextTemplate().toSafeHtml(); assertEquals( 'I &lt;3 Puppies &amp; Kittens', goog.html.SafeHtml.unwrap(html)); html = example.sanitizedHtmlTemplate().toSafeHtml(); assertEquals('Hello <b>World</b>', goog.html.SafeHtml.unwrap(html)); } function testToSafeUrl() { var url; url = example.sanitizedSmsUrlTemplate().toSafeUrl(); assertEquals('sms:123456789', goog.html.SafeUrl.unwrap(url)); url = example.sanitizedHttpUrlTemplate().toSafeUrl(); assertEquals('https://google.com/foo?n=917', goog.html.SafeUrl.unwrap(url)); }
apache-2.0
balzaczyy/golucene
core/util/bytesRefHash.go
10103
package util

import (
	"fmt"
)

/*
BytesRefHash is a special purpose hash map like data structure optimized for
BytesRef instances. BytesRefHash maintains mappings of byte arrays to ids
(map[[]byte]int) sorting the hashed bytes efficiently in continuous storage.
The mapping to the id is encapsulated inside BytesRefHash and is guaranteed to
be increased for each added BytesRef.

Note: The maximum capacity BytesRef instance passed to add() must not be longer
than BYTE_BLOCK_SIZE-2. The internal storage is limited to 2GB total byte
storage.
*/
type BytesRefHash struct {
	pool       *ByteBlockPool // backing storage for the byte payloads
	bytesStart []int          // per-id start offset of the stored bytes in pool
	scratch1   *BytesRef      // reusable scratch ref for equality checks
	// open-addressing table state; hashSize is always a power of 2 so
	// hashMask (= hashSize-1) can replace a modulo.
	hashSize        int
	hashHalfSize    int
	hashMask        int
	count           int // number of live entries (also the next id to assign)
	lastCount       int // count at the time of the last compact()/Clear()
	ids             []int // hash table: slot -> id, -1 marks an empty slot
	bytesStartArray BytesStartArray
	bytesUsed       Counter // tracks memory usage in bytes
}

// NewBytesRefHash creates a BytesRefHash backed by the given pool.
// capacity must be a power of 2 (it becomes the initial table size).
// If bytesStartArray supplies no counter, a fresh one is used.
func NewBytesRefHash(pool *ByteBlockPool, capacity int,
	bytesStartArray BytesStartArray) *BytesRefHash {

	ids := make([]int, capacity)
	for i, _ := range ids {
		ids[i] = -1 // mark every slot empty
	}
	counter := bytesStartArray.BytesUsed()
	if counter == nil {
		counter = NewCounter()
	}
	counter.AddAndGet(int64(capacity) * NUM_BYTES_INT)
	return &BytesRefHash{
		scratch1:        NewEmptyBytesRef(),
		hashSize:        capacity,
		hashHalfSize:    capacity >> 1,
		hashMask:        capacity - 1,
		lastCount:       -1,
		pool:            pool,
		ids:             ids,
		bytesStartArray: bytesStartArray,
		bytesStart:      bytesStartArray.Init(),
		bytesUsed:       counter,
	}
}

/* Returns the number of values in this hash. */
func (h *BytesRefHash) Size() int {
	return h.count
}

/*
compact moves all live ids to the front of h.ids, in arbitrary order; valid
ids start at offset 0 and end at a limit of size()-1.

Note: This is a destructive operation. clear() must be called in order to
reuse this BytesRefHash instance.
*/
func (h *BytesRefHash) compact() []int {
	assert2(h.bytesStart != nil, "bytesStart is nil - not initialized")
	upto := 0
	for i := 0; i < h.hashSize; i++ {
		if h.ids[i] != -1 {
			if upto < i {
				// shift this id down and free its original slot
				h.ids[upto] = h.ids[i]
				h.ids[i] = -1
			}
			upto++
		}
	}
	assert(upto == h.count)
	h.lastCount = h.count
	return h.ids
}

// bytesRefIntroSorter adapts an id slice to IntroSorter, comparing entries
// by the byte payloads they refer to (resolved through the owner's pool).
type bytesRefIntroSorter struct {
	*IntroSorter
	owner    *BytesRefHash
	compact  []int
	comp     func([]byte, []byte) bool // "less" ordering on raw bytes
	pivot    *BytesRef                 // holds the current pivot's bytes
	scratch1 *BytesRef
	scratch2 *BytesRef
}

// newBytesRefIntroSorter builds a sorter over the compacted id slice v.
func newBytesRefIntroSorter(owner *BytesRefHash, v []int,
	comp func([]byte, []byte) bool) *bytesRefIntroSorter {

	ans := &bytesRefIntroSorter{
		owner:    owner,
		compact:  v,
		comp:     comp,
		pivot:    NewEmptyBytesRef(),
		scratch1: NewEmptyBytesRef(),
		scratch2: NewEmptyBytesRef(),
	}
	ans.IntroSorter = NewIntroSorter(ans, ans)
	return ans
}

func (a *bytesRefIntroSorter) Len() int { return len(a.compact) }

func (a *bytesRefIntroSorter) Swap(i, j int) {
	a.compact[i], a.compact[j] = a.compact[j], a.compact[i]
}

// Less resolves both ids to their stored bytes and applies comp.
func (a *bytesRefIntroSorter) Less(i, j int) bool {
	id1, id2 := a.compact[i], a.compact[j]
	assert(len(a.owner.bytesStart) > id1 && len(a.owner.bytesStart) > id2)
	a.owner.pool.SetBytesRef(a.scratch1, a.owner.bytesStart[id1])
	a.owner.pool.SetBytesRef(a.scratch2, a.owner.bytesStart[id2])
	return a.comp(a.scratch1.ToBytes(), a.scratch2.ToBytes())
}

func (a *bytesRefIntroSorter) SetPivot(i int) {
	id := a.compact[i]
	assert(len(a.owner.bytesStart) > id)
	a.owner.pool.SetBytesRef(a.pivot, a.owner.bytesStart[id])
}

func (a *bytesRefIntroSorter) PivotLess(j int) bool {
	id := a.compact[j]
	assert(len(a.owner.bytesStart) > id)
	a.owner.pool.SetBytesRef(a.scratch2, a.owner.bytesStart[id])
	return a.comp(a.pivot.ToBytes(), a.scratch2.ToBytes())
}

/*
Sort returns the values array sorted by the referenced byte values.

Note: this is a destructive operation. clear() must be called in order to
reuse this BytesRefHash instance.
*/
func (h *BytesRefHash) Sort(comp func(a, b []byte) bool) []int {
	compact := h.compact()
	s := newBytesRefIntroSorter(h, compact, comp)
	s.Sort(0, h.count)
	return compact
}

// equals reports whether the bytes stored for id are exactly b.
func (h *BytesRefHash) equals(id int, b []byte) bool {
	h.pool.SetBytesRef(h.scratch1, h.bytesStart[id])
	return h.scratch1.bytesEquals(b)
}

// shrink halves the table repeatedly while it is at least 4x larger than
// targetSize (never below 8), reallocating the slot array. Returns true if
// the table was actually resized.
func (h *BytesRefHash) shrink(targetSize int) bool {
	// Cannot use util.Shrink because we require power of 2:
	newSize := h.hashSize
	for newSize >= 8 && newSize/4 > targetSize {
		newSize /= 2
	}
	if newSize != h.hashSize {
		h.bytesUsed.AddAndGet(NUM_BYTES_INT * -int64(h.hashSize-newSize))
		h.hashSize = newSize
		h.ids = make([]int, h.hashSize)
		for i, _ := range h.ids {
			h.ids[i] = -1
		}
		h.hashHalfSize = newSize / 2
		h.hashMask = newSize - 1
		return true
	}
	return false
}

/* Clear removes all entries; if resetPool is true the byte pool is reset too. */
func (h *BytesRefHash) Clear(resetPool bool) {
	h.lastCount = h.count
	h.count = 0
	if resetPool {
		h.pool.Reset(false, false) // we don't need to 0-fill the buffers
	}
	h.bytesStart = h.bytesStartArray.Clear()
	if h.lastCount != -1 && h.shrink(h.lastCount) {
		// shrink already reallocated (and thus cleared) the hash entries
		return
	}
	for i, _ := range h.ids {
		h.ids[i] = -1
	}
}

// MaxBytesLengthExceededError is returned by Add when the entry would not
// fit in a single byte block (len > BYTE_BLOCK_SIZE-2).
type MaxBytesLengthExceededError string

func (e MaxBytesLengthExceededError) Error() string {
	return string(e)
}

/*
Add inserts a new BytesRef. Returns the new id on success; if the bytes are
already present, returns -(existingId+1) (always negative) so callers can
distinguish the two cases.
*/
func (h *BytesRefHash) Add(bytes []byte) (int, error) {
	assert2(h.bytesStart != nil, "Bytesstart is null - not initialized")
	length := len(bytes)
	// final position
	hashPos := h.findHash(bytes)
	e := h.ids[hashPos]
	if e == -1 {
		// new entry; 2 extra bytes reserved for the vint length prefix
		if len2 := 2 + len(bytes); len2+h.pool.ByteUpto > BYTE_BLOCK_SIZE {
			if len2 > BYTE_BLOCK_SIZE {
				return 0, MaxBytesLengthExceededError(fmt.Sprintf(
					"bytes can be at most %v in length; got %v", BYTE_BLOCK_SIZE-2, len(bytes)))
			}
			h.pool.NextBuffer()
		}
		buffer := h.pool.Buffer
		bufferUpto := h.pool.ByteUpto
		if h.count >= len(h.bytesStart) {
			h.bytesStart = h.bytesStartArray.Grow()
			// NOTE(review): this allows count == len(bytesStart) after Grow;
			// confirm Grow() always returns capacity > count.
			assert2(h.count < len(h.bytesStart)+1,
				"count: %v len: %v", h.count, len(h.bytesStart))
		}
		e = h.count
		h.count++
		// record where this entry starts, as a global pool offset
		h.bytesStart[e] = bufferUpto + h.pool.ByteOffset

		// We first encode the length, followed by the bytes. Length is
		// encoded as vint, but will consume 1 or 2 bytes at most (we
		// reject too-long terms, above).
		if length < 128 {
			// 1 byte to store length
			buffer[bufferUpto] = byte(length)
			h.pool.ByteUpto += length + 1
			assert2(length >= 0, "Length must be positive: %v", length)
			copy(buffer[bufferUpto+1:], bytes)
		} else {
			// 2 bytes to store length
			buffer[bufferUpto] = byte(0x80 | (length & 0x7f))
			buffer[bufferUpto+1] = byte((length >> 7) & 0xff)
			h.pool.ByteUpto += length + 2
			copy(buffer[bufferUpto+2:], bytes)
		}
		assert(h.ids[hashPos] == -1)
		h.ids[hashPos] = e

		// grow at 50% load to keep probe chains short
		if h.count == h.hashHalfSize {
			h.rehash(2*h.hashSize, true)
		}
		return e, nil
	}
	return -(e + 1), nil
}

// findHash returns the table slot for bytes: either the slot already holding
// an equal entry, or the first empty slot of its probe chain.
func (h *BytesRefHash) findHash(bytes []byte) int {
	assert2(h.bytesStart != nil, "bytesStart is null - not initialized")
	code := h.doHash(bytes)
	// final position
	hashPos := code & h.hashMask
	if e := h.ids[hashPos]; e != -1 && !h.equals(e, bytes) {
		// conflict; use linear probe to find an open slot
		// (see LUCENE-5604):
		for {
			code++
			hashPos = code & h.hashMask
			e = h.ids[hashPos]
			if e == -1 || h.equals(e, bytes) {
				break
			}
		}
	}
	return hashPos
}

/*
rehash resizes the slot table to newSize and reinserts every live id.
If hashOnData is true the stored bytes are decoded (1- or 2-byte vint length
prefix, then payload) and rehashed; otherwise the bytesStart offset itself is
used as the hash code.
*/
func (h *BytesRefHash) rehash(newSize int, hashOnData bool) {
	newMask := newSize - 1
	h.bytesUsed.AddAndGet(NUM_BYTES_INT * int64(newSize))
	newHash := make([]int, newSize)
	for i, _ := range newHash {
		newHash[i] = -1
	}
	for i := 0; i < h.hashSize; i++ {
		if e0 := h.ids[i]; e0 != -1 {
			var code int
			if hashOnData {
				off := h.bytesStart[e0]
				start := off & BYTE_BLOCK_MASK
				bytes := h.pool.Buffers[off>>BYTE_BLOCK_SHIFT]
				var length int
				var pos int
				if bytes[start]&0x80 == 0 {
					// length is 1 byte
					length = int(bytes[start])
					pos = start + 1
				} else {
					// 2-byte length: low 7 bits, then high byte
					length = int(bytes[start]&0x7f) + (int(bytes[start+1]&0xff) << 7)
					pos = start + 2
				}
				code = h.doHash(bytes[pos : pos+length])
			} else {
				code = h.bytesStart[e0]
			}
			hashPos := code & newMask
			assert(hashPos >= 0)
			if newHash[hashPos] != -1 {
				// conflict; use linear probe to find an open slot
				// (see LUCENE-5604)
				for {
					code++
					hashPos = code & newMask
					if newHash[hashPos] == -1 {
						break
					}
				}
			}
			assert(newHash[hashPos] == -1)
			newHash[hashPos] = e0
		}
	}
	h.hashMask = newMask
	h.bytesUsed.AddAndGet(NUM_BYTES_INT * int64(-len(h.ids)))
	h.ids = newHash
	h.hashSize = newSize
	h.hashHalfSize = newSize / 2
}

// doHash computes the hash code of p (MurmurHash3 x86 32-bit).
func (h *BytesRefHash) doHash(p []byte) int {
	return int(MurmurHash3_x86_32(p, GOOD_FAST_HASH_SEED))
}

/*
Reinit reinitializes the BytesRefHash after a previous clear() call. If
clear() has not been called previously this method has no effect.
*/
func (h *BytesRefHash) Reinit() {
	if h.bytesStart == nil {
		h.bytesStart = h.bytesStartArray.Init()
	}
	if h.ids == nil {
		h.ids = make([]int, h.hashSize)
		h.bytesUsed.AddAndGet(NUM_BYTES_INT * int64(h.hashSize))
	}
}

/*
ByteStart returns the bytesStart offset into the internally used
ByteBlockPool for the given bytesId.
*/
func (h *BytesRefHash) ByteStart(bytesId int) int {
	assert2(h.bytesStart != nil, "bytesStart is null - not initialized")
	assert2(bytesId >= 0 && bytesId <= h.count, "%v", bytesId)
	return h.bytesStart[bytesId]
}

/*
BytesStartArray manages allocation of per-term addresses.
*/
type BytesStartArray interface {
	// Initializes the BytesStartArray. This call will allocate memory
	Init() []int
	// A Counter reference holding the number of bytes used by this
	// BytesStartArray. The BytesRefHash uses this reference to track
	// its memory usage
	BytesUsed() Counter
	// Grows the BytesStartArray
	Grow() []int
	// clears the BytesStartArray and returns the cleared instance.
	Clear() []int
}
apache-2.0
kfirlevari/zookeeper
src/java/main/org/apache/zookeeper/server/WatchManager.java
8715
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.zookeeper.server;

import java.io.PrintWriter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.Watcher.Event.EventType;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class manages watches. It allows watches to be associated with a string
 * and removes watchers and their watches in addition to managing triggers.
 *
 * Two mirrored maps are maintained: path -> watchers and watcher -> paths.
 * They must stay consistent, which is why every mutation is synchronized
 * on this instance.
 */
class WatchManager {
    private static final Logger LOG = LoggerFactory.getLogger(WatchManager.class);

    // path -> watchers registered on that path
    private final Map<String, Set<Watcher>> watchTable =
        new HashMap<String, Set<Watcher>>();

    // watcher (connection) -> paths it watches; reverse index of watchTable
    private final Map<Watcher, Set<String>> watch2Paths =
        new HashMap<Watcher, Set<String>>();

    /** Total number of (path, watcher) pairs currently registered. */
    synchronized int size(){
        int result = 0;
        for(Set<Watcher> watches : watchTable.values()) {
            result += watches.size();
        }
        return result;
    }

    /** Registers watcher on path, updating both maps. Idempotent per pair. */
    synchronized void addWatch(String path, Watcher watcher) {
        Set<Watcher> list = watchTable.get(path);
        if (list == null) {
            // don't waste memory if there are few watches on a node
            // rehash when the 4th entry is added, doubling size thereafter
            // seems like a good compromise
            list = new HashSet<Watcher>(4);
            watchTable.put(path, list);
        }
        list.add(watcher);

        Set<String> paths = watch2Paths.get(watcher);
        if (paths == null) {
            // cnxns typically have many watches, so use default cap here
            paths = new HashSet<String>();
            watch2Paths.put(watcher, paths);
        }
        paths.add(path);
    }

    /** Removes every watch held by this watcher (e.g. when its connection closes). */
    synchronized void removeWatcher(Watcher watcher) {
        Set<String> paths = watch2Paths.remove(watcher);
        if (paths == null) {
            return;
        }
        for (String p : paths) {
            Set<Watcher> list = watchTable.get(p);
            if (list != null) {
                list.remove(watcher);
                if (list.size() == 0) {
                    watchTable.remove(p);
                }
            }
        }
    }

    /** Fires all watches on path for the given event type; see the overload below. */
    Set<Watcher> triggerWatch(String path, EventType type) {
        return triggerWatch(path, type, null);
    }

    /**
     * Fires (and unregisters — ZooKeeper watches are one-shot) all watches
     * on path, skipping any watcher present in supress.
     *
     * Returns the set of watchers that were registered on the path, or null
     * if there were none. Note the bookkeeping is done under the lock, but
     * the watcher callbacks run after it is released so alien code is never
     * invoked while holding this manager's monitor.
     */
    Set<Watcher> triggerWatch(String path, EventType type, Set<Watcher> supress) {
        WatchedEvent e = new WatchedEvent(type,
                KeeperState.SyncConnected, path);
        Set<Watcher> watchers;
        synchronized (this) {
            watchers = watchTable.remove(path);
            if (watchers == null || watchers.isEmpty()) {
                if (LOG.isTraceEnabled()) {
                    ZooTrace.logTraceMessage(LOG,
                            ZooTrace.EVENT_DELIVERY_TRACE_MASK,
                            "No watchers for " + path);
                }
                return null;
            }
            for (Watcher w : watchers) {
                Set<String> paths = watch2Paths.get(w);
                if (paths != null) {
                    paths.remove(path);
                }
            }
        }
        for (Watcher w : watchers) {
            if (supress != null && supress.contains(w)) {
                continue;
            }
            w.process(e);
        }
        return watchers;
    }

    /**
     * Brief description of this object.
     */
    @Override
    public synchronized String toString() {
        StringBuilder sb = new StringBuilder();

        sb.append(watch2Paths.size()).append(" connections watching ")
            .append(watchTable.size()).append(" paths\n");

        int total = 0;
        for (Set<String> paths : watch2Paths.values()) {
            total += paths.size();
        }
        sb.append("Total watches:").append(total);

        return sb.toString();
    }

    /**
     * Writes a string representation of watches to pwriter. Warning, may be large!
     *
     * @param byPath iff true output watches by paths, otw output
     * watches by connection
     */
    synchronized void dumpWatches(PrintWriter pwriter, boolean byPath) {
        if (byPath) {
            for (Entry<String, Set<Watcher>> e : watchTable.entrySet()) {
                pwriter.println(e.getKey());
                for (Watcher w : e.getValue()) {
                    pwriter.print("\t0x");
                    pwriter.print(Long.toHexString(((ServerCnxn)w).getSessionId()));
                    pwriter.print("\n");
                }
            }
        } else {
            for (Entry<Watcher, Set<String>> e : watch2Paths.entrySet()) {
                pwriter.print("0x");
                pwriter.println(Long.toHexString(((ServerCnxn)e.getKey()).getSessionId()));
                for (String path : e.getValue()) {
                    pwriter.print("\t");
                    pwriter.println(path);
                }
            }
        }
    }

    /**
     * Checks the specified watcher exists for the given path
     *
     * @param path
     *            znode path
     * @param watcher
     *            watcher object reference
     * @return true if the watcher exists, false otherwise
     */
    synchronized boolean containsWatcher(String path, Watcher watcher) {
        Set<String> paths = watch2Paths.get(watcher);
        if (paths == null || !paths.contains(path)) {
            return false;
        }
        return true;
    }

    /**
     * Removes the specified watcher for the given path
     *
     * @param path
     *            znode path
     * @param watcher
     *            watcher object reference
     * @return true if the watcher successfully removed, false otherwise
     */
    synchronized boolean removeWatcher(String path, Watcher watcher) {
        // remove from the reverse index first; bail out if the pair is absent
        Set<String> paths = watch2Paths.get(watcher);
        if (paths == null || !paths.remove(path)) {
            return false;
        }

        Set<Watcher> list = watchTable.get(path);
        if (list == null || !list.remove(watcher)) {
            return false;
        }

        if (list.size() == 0) {
            watchTable.remove(path);
        }

        return true;
    }

    /**
     * Returns a watch report.
     *
     * @return watch report
     * @see WatchesReport
     */
    synchronized WatchesReport getWatches() {
        Map<Long, Set<String>> id2paths = new HashMap<Long, Set<String>>();
        for (Entry<Watcher, Set<String>> e: watch2Paths.entrySet()) {
            Long id = ((ServerCnxn) e.getKey()).getSessionId();
            // defensive copy so the report cannot observe later mutations
            Set<String> paths = new HashSet<String>(e.getValue());
            id2paths.put(id, paths);
        }
        return new WatchesReport(id2paths);
    }

    /**
     * Returns a watch report by path.
     *
     * @return watch report
     * @see WatchesPathReport
     */
    synchronized WatchesPathReport getWatchesByPath() {
        Map<String, Set<Long>> path2ids = new HashMap<String, Set<Long>>();
        for (Entry<String, Set<Watcher>> e : watchTable.entrySet()) {
            Set<Long> ids = new HashSet<Long>(e.getValue().size());
            path2ids.put(e.getKey(), ids);
            for (Watcher watcher : e.getValue()) {
                ids.add(((ServerCnxn) watcher).getSessionId());
            }
        }
        return new WatchesPathReport(path2ids);
    }

    /**
     * Returns a watch summary.
     *
     * @return watch summary
     * @see WatchesSummary
     */
    synchronized WatchesSummary getWatchesSummary() {
        int totalWatches = 0;
        for (Set<String> paths : watch2Paths.values()) {
            totalWatches += paths.size();
        }
        return new WatchesSummary (watch2Paths.size(), watchTable.size(),
                                   totalWatches);
    }
}
apache-2.0
yida-lxw/solr-5.3.1
lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/processors/RemoveEmptyNonLeafQueryNodeProcessor.java
3319
package org.apache.lucene.queryparser.flexible.standard.processors;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.LinkedList;
import java.util.List;

import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.core.nodes.GroupQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.MatchNoDocsQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.ModifierQueryNode;
import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;

/**
 * This processor removes every {@link QueryNode} that is not a leaf and has
 * no children. If after processing the entire tree the root node is not a
 * leaf and has no children, a {@link MatchNoDocsQueryNode} object is returned.
 * <br>
 * This processor is used at the end of a pipeline to avoid invalid query node
 * tree structures like a {@link GroupQueryNode} or {@link ModifierQueryNode}
 * with no children.
 *
 * @see QueryNode
 * @see MatchNoDocsQueryNode
 */
public class RemoveEmptyNonLeafQueryNodeProcessor extends
    QueryNodeProcessorImpl {

  // Reusable scratch buffer for setChildrenOrder; cleared after every use.
  // NOTE(review): this makes processor instances stateful — presumably each
  // instance is confined to a single thread, as elsewhere in this framework.
  private LinkedList<QueryNode> childrenBuffer = new LinkedList<>();

  public RemoveEmptyNonLeafQueryNodeProcessor() {
    // empty constructor
  }

  /**
   * Processes the tree bottom-up via the superclass, then, if the resulting
   * root is a childless non-leaf, replaces the whole tree with a
   * {@link MatchNoDocsQueryNode}.
   */
  @Override
  public QueryNode process(QueryNode queryTree) throws QueryNodeException {
    queryTree = super.process(queryTree);

    if (!queryTree.isLeaf()) {

      List<QueryNode> children = queryTree.getChildren();

      if (children == null || children.size() == 0) {
        return new MatchNoDocsQueryNode();
      }

    }

    return queryTree;

  }

  // No per-node transformation is needed; all work happens in setChildrenOrder.
  @Override
  protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {

    return node;

  }

  @Override
  protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {

    return node;

  }

  /**
   * Filters the children list in place: keeps every leaf and every non-leaf
   * that still has children; drops childless non-leaves. The finally block
   * guarantees the shared buffer is emptied even if a getter throws.
   */
  @Override
  protected List<QueryNode> setChildrenOrder(List<QueryNode> children)
      throws QueryNodeException {

    try {

      for (QueryNode child : children) {

        if (!child.isLeaf()) {

          List<QueryNode> grandChildren = child.getChildren();

          if (grandChildren != null && grandChildren.size() > 0) {
            this.childrenBuffer.add(child);
          }

        } else {
          this.childrenBuffer.add(child);
        }

      }

      children.clear();
      children.addAll(this.childrenBuffer);

    } finally {
      this.childrenBuffer.clear();
    }

    return children;

  }

}
apache-2.0
goodwinnk/intellij-community
platform/xdebugger-api/src/com/intellij/xdebugger/breakpoints/ui/XBreakpointGroup.java
1450
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.xdebugger.breakpoints.ui; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; /** * @author nik */ public abstract class XBreakpointGroup implements Comparable<XBreakpointGroup> { @Nullable public Icon getIcon(boolean isOpen) { return null; } @NotNull public abstract String getName(); @Override public String toString() { return getName(); } @Override public boolean equals(Object obj) { if (obj == this) return true; if (obj == null) return false; return (getClass() == obj.getClass()) && compareTo((XBreakpointGroup)obj) == 0; } @Override public int compareTo(final XBreakpointGroup o) { return getName().compareTo(o.getName()); } @Override public int hashCode() { return getName().hashCode(); } }
apache-2.0
SQCLabs/openui5
src/sap.m/test/sap/m/qunit/Switch.qunit.html
1382
<!DOCTYPE html>
<html>
<head>
	<meta http-equiv="X-UA-Compatible" content="IE=edge">
	<meta charset="utf-8">
	<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
	<title>Test Page for sap.m.Switch</title>

	<script src="../shared-config.js"></script>
	<script id="sap-ui-bootstrap"
		data-sap-ui-noConflict="true"
		src="../../../../resources/sap-ui-core.js"
		data-sap-ui-libs="sap.m">
	</script>

	<link rel="stylesheet" href="../../../../resources/sap/ui/thirdparty/qunit.css" type="text/css" media="screen">
	<script src="../../../../resources/sap/ui/thirdparty/qunit.js"></script>
	<script src="../../../../resources/sap/ui/qunit/qunit-junit.js"></script>
	<script src="../../../../resources/sap/ui/qunit/qunit-coverage.js"></script>
	<script src="../../../../resources/sap/ui/qunit/QUnitUtils.js"></script>
	<script src="../../../../resources/sap/ui/thirdparty/sinon.js"></script>
	<script src="../../../../resources/sap/ui/thirdparty/sinon-qunit.js"></script>

	<script src="Switch.qunit.js"></script>
</head>
<body id="body" class="sapUiBody">
	<h1 id="qunit-header">QUnit tests: sap.m.Switch</h1>
	<h2 id="qunit-banner"></h2>
	<h2 id="qunit-userAgent"></h2>
	<div id="qunit-testrunner-toolbar"></div>
	<ol id="qunit-tests"></ol>
	<div id="content" class="content"></div>
</body>
</html>
apache-2.0
jeffposnick/WebFundamentals
src/content/en/shows/ttt/series-1/cmd-line-shortcuts.markdown
523
--- layout: shows/episode title: "Totally Tooling Mini Tip: Command-line Keyboard Shortcuts" description: "In this cheeky little mini tip Addy and Matt look at keyboard shortcuts that help you navigate the command line with ease." youtubeID: ND-W4e-pqMo published_on: 2015-06-24 updated_on: 2015-06-24 --- In this cheeky little mini tip Addy and Matt look at keyboard shortcuts that help you navigate the command line with ease. Subscribe to the Google Developers channel at: [http://goo.gl/mQyv5L](http://goo.gl/mQyv5L)
apache-2.0
goldmansachs/reladomo
reladomo/src/main/java/com/gs/fw/common/mithra/portal/UpdateDataChooser.java
885
/*
 Copyright 2016 Goldman Sachs.
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */

package com.gs.fw.common.mithra.portal;

import com.gs.fw.common.mithra.MithraDataObject;
import com.gs.fw.common.mithra.MithraTransactionalObject;

/**
 * Strategy interface used by the portal layer to pick which {@link MithraDataObject}
 * of a transactional object should be used when building a multi-object update.
 */
public interface UpdateDataChooser
{
    /**
     * Selects the data object of {@code mithraObject} to use for a multi-update.
     *
     * @param mithraObject the transactional object whose data is being chosen
     * @return the data object to include in the multi-update
     */
    MithraDataObject chooseDataForMultiUpdate(MithraTransactionalObject mithraObject);
}
apache-2.0
devigned/azure-powershell
src/ResourceManager/LogicApp/Commands.LogicApp/Cmdlets/IntegrationAccount/UpdateAzureIntegrationAccountMapCommand.cs
6849
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

namespace Microsoft.Azure.Commands.LogicApp.Cmdlets
{
    using System;
    using System.Management.Automation;
    using Microsoft.Azure.Commands.LogicApp.Utilities;
    using Microsoft.Azure.Management.Logic.Models;
    using Microsoft.WindowsAzure.Commands.Utilities.Common;
    using System.Globalization;
    using ResourceManager.Common.ArgumentCompleters;

    /// <summary>
    /// Updates the integration account map.
    /// </summary>
    [Cmdlet(VerbsCommon.Set, "AzureRmIntegrationAccountMap", SupportsShouldProcess = true)]
    [OutputType(typeof(IntegrationAccountMap))]
    public class UpdateAzureIntegrationAccountMapCommand : LogicAppBaseCmdlet
    {
        #region Defaults

        /// <summary>
        /// Default content type for map.
        /// </summary>
        private string contentType = "application/xml";

        /// <summary>
        /// Default map type.
        /// </summary>
        private string mapType = "Xslt";

        #endregion Defaults

        #region Input Parameters

        [Parameter(Mandatory = true, HelpMessage = "The integration account resource group name.",
            ValueFromPipelineByPropertyName = true)]
        [ResourceGroupCompleter]
        [ValidateNotNullOrEmpty]
        public string ResourceGroupName { get; set; }

        [Parameter(Mandatory = true, HelpMessage = "The integration account name.",
            ValueFromPipelineByPropertyName = true)]
        [ValidateNotNullOrEmpty]
        [Alias("IntegrationAccountName", "ResourceName")]
        public string Name { get; set; }

        [Parameter(Mandatory = true, HelpMessage = "The integration account map name.",
            ValueFromPipelineByPropertyName = true)]
        [ValidateNotNullOrEmpty]
        public string MapName { get; set; }

        [Parameter(Mandatory = false, HelpMessage = "The integration account map file path.")]
        [ValidateNotNullOrEmpty]
        public string MapFilePath { get; set; }

        [Parameter(Mandatory = false, HelpMessage = "The integration account map definition.")]
        [ValidateNotNullOrEmpty]
        public string MapDefinition { get; set; }

        [Parameter(Mandatory = false, HelpMessage = "The integration account map type.")]
        [ValidateSet("Xslt", IgnoreCase = false)]
        [ValidateNotNullOrEmpty]
        public string MapType
        {
            get { return this.mapType; }
            // BUG FIX: previously "value = this.mapType", which assigned the backing
            // field to the setter parameter and silently discarded the caller's value.
            set { this.mapType = value; }
        }

        [Parameter(Mandatory = false, HelpMessage = "The integration account map content type.")]
        [ValidateNotNullOrEmpty]
        public string ContentType
        {
            get { return this.contentType; }
            // BUG FIX: same reversed assignment as MapType; user-supplied content
            // types were never stored.
            set { this.contentType = value; }
        }

        [Parameter(Mandatory = false, HelpMessage = "The integration account map metadata.",
            ValueFromPipelineByPropertyName = false)]
        [ValidateNotNullOrEmpty]
        public object Metadata { get; set; }

        [Parameter(Mandatory = false, HelpMessage = "Do not ask for confirmation.")]
        public SwitchParameter Force { get; set; }

        #endregion Input Parameters

        /// <summary>
        /// Executes the integration account map update command.
        /// </summary>
        public override void ExecuteCmdlet()
        {
            base.ExecuteCmdlet();

            // Fetch the parent account first so a missing account fails before any update work.
            var integrationAccount = IntegrationAccountClient.GetIntegrationAccount(this.ResourceGroupName, this.Name);

            var integrationAccountMap = IntegrationAccountClient.GetIntegrationAccountMap(this.ResourceGroupName,
                this.Name, this.MapName);

            // Clone the existing map; contentLink is intentionally dropped because the
            // service regenerates it.
            var integrationAccountMapCopy = new IntegrationAccountMap(
                mapType: integrationAccountMap.MapType,
                id: integrationAccountMap.Id,
                name: integrationAccountMap.Name,
                type: integrationAccountMap.Type,
                location: integrationAccountMap.Location,
                tags: integrationAccountMap.Tags,
                parametersSchema: integrationAccountMap.ParametersSchema,
                createdTime: integrationAccountMap.CreatedTime,
                changedTime: integrationAccountMap.ChangedTime,
                content: integrationAccountMap.Content,
                contentLink: null,
                metadata: integrationAccountMap.Metadata);

            if (!string.IsNullOrEmpty(this.MapFilePath))
            {
                integrationAccountMapCopy.Content =
                    CmdletHelper.GetContentFromFile(this.TryResolvePath(this.MapFilePath));
            }

            if (!string.IsNullOrEmpty(this.MapDefinition))
            {
                // BUG FIX: removed a stray leftover
                // "CmdletHelper.GetContentFromFile(this.TryResolvePath(this.MapFilePath));"
                // call here; MapFilePath is typically null in this branch and the call's
                // result was discarded anyway.
                integrationAccountMapCopy.Content = this.MapDefinition;
            }

            if (!string.IsNullOrEmpty(this.ContentType))
            {
                integrationAccountMapCopy.ContentType = this.contentType;
            }

            if (!string.IsNullOrEmpty(this.MapType))
            {
                integrationAccountMapCopy.MapType = (MapType)Enum.Parse(typeof(MapType), this.MapType);
            }

            if (this.Metadata != null)
            {
                integrationAccountMapCopy.Metadata = CmdletHelper.ConvertToMetadataJObject(this.Metadata);
            }

            ConfirmAction(Force.IsPresent,
                string.Format(CultureInfo.InvariantCulture, Properties.Resource.UpdateResourceWarning,
                    "Microsoft.Logic/integrationAccounts/maps", this.Name),
                string.Format(CultureInfo.InvariantCulture, Properties.Resource.UpdateResourceMessage,
                    "Microsoft.Logic/integrationAccounts/maps", this.Name),
                Name,
                () =>
                {
                    this.WriteObject(IntegrationAccountClient.UpdateIntegrationAccountMap(this.ResourceGroupName,
                        this.Name, this.MapName, integrationAccountMapCopy), true);
                },
                null);
        }
    }
}
apache-2.0
baishuo/hbase-1.0.0-cdh5.4.7_baishuo
hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
14404
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.mapreduce.replication; import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HConnectable; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.mapreduce.TableInputFormat; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; import org.apache.hadoop.hbase.mapreduce.TableMapper; import org.apache.hadoop.hbase.mapreduce.TableSplit; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationFactory; import org.apache.hadoop.hbase.replication.ReplicationPeerZKImpl; import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.replication.ReplicationPeers; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; /** * This map-only job compares the data from a local table with a remote one. * Every cell is compared and must have exactly the same keys (even timestamp) * as well as same value. It is possible to restrict the job by time range and * families. The peer id that's provided must match the one given when the * replication stream was setup. * <p> * Two counters are provided, Verifier.Counters.GOODROWS and BADROWS. The reason * for a why a row is different is shown in the map's log. */ public class VerifyReplication extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(VerifyReplication.class); public final static String NAME = "verifyrep"; static long startTime = 0; static long endTime = Long.MAX_VALUE; static int versions = -1; static String tableName = null; static String families = null; static String peerId = null; /** * Map-only comparator for 2 tables */ public static class Verifier extends TableMapper<ImmutableBytesWritable, Put> { public static enum Counters { GOODROWS, BADROWS, ONLY_IN_SOURCE_TABLE_ROWS, ONLY_IN_PEER_TABLE_ROWS, CONTENT_DIFFERENT_ROWS} private ResultScanner replicatedScanner; private Result currentCompareRowInPeerTable; /** * Map method that compares every scanned row with the equivalent from * a distant cluster. * @param row The current table row key. * @param value The columns. * @param context The current context. * @throws IOException When something is broken with the data. 
*/ @Override public void map(ImmutableBytesWritable row, final Result value, Context context) throws IOException { if (replicatedScanner == null) { Configuration conf = context.getConfiguration(); final Scan scan = new Scan(); scan.setCaching(conf.getInt(TableInputFormat.SCAN_CACHEDROWS, 1)); long startTime = conf.getLong(NAME + ".startTime", 0); long endTime = conf.getLong(NAME + ".endTime", Long.MAX_VALUE); String families = conf.get(NAME + ".families", null); if(families != null) { String[] fams = families.split(","); for(String fam : fams) { scan.addFamily(Bytes.toBytes(fam)); } } scan.setTimeRange(startTime, endTime); if (versions >= 0) { scan.setMaxVersions(versions); } final TableSplit tableSplit = (TableSplit)(context.getInputSplit()); HConnectionManager.execute(new HConnectable<Void>(conf) { @Override public Void connect(HConnection conn) throws IOException { String zkClusterKey = conf.get(NAME + ".peerQuorumAddress"); Configuration peerConf = HBaseConfiguration.create(conf); ZKUtil.applyClusterKeyToConf(peerConf, zkClusterKey); TableName tableName = TableName.valueOf(conf.get(NAME + ".tableName")); // TODO: THis HTable doesn't get closed. Fix! 
Table replicatedTable = new HTable(peerConf, tableName); scan.setStartRow(value.getRow()); scan.setStopRow(tableSplit.getEndRow()); replicatedScanner = replicatedTable.getScanner(scan); return null; } }); currentCompareRowInPeerTable = replicatedScanner.next(); } while (true) { if (currentCompareRowInPeerTable == null) { // reach the region end of peer table, row only in source table logFailRowAndIncreaseCounter(context, Counters.ONLY_IN_SOURCE_TABLE_ROWS, value); break; } int rowCmpRet = Bytes.compareTo(value.getRow(), currentCompareRowInPeerTable.getRow()); if (rowCmpRet == 0) { // rowkey is same, need to compare the content of the row try { Result.compareResults(value, currentCompareRowInPeerTable); context.getCounter(Counters.GOODROWS).increment(1); } catch (Exception e) { logFailRowAndIncreaseCounter(context, Counters.CONTENT_DIFFERENT_ROWS, value); } currentCompareRowInPeerTable = replicatedScanner.next(); break; } else if (rowCmpRet < 0) { // row only exists in source table logFailRowAndIncreaseCounter(context, Counters.ONLY_IN_SOURCE_TABLE_ROWS, value); break; } else { // row only exists in peer table logFailRowAndIncreaseCounter(context, Counters.ONLY_IN_PEER_TABLE_ROWS, currentCompareRowInPeerTable); currentCompareRowInPeerTable = replicatedScanner.next(); } } } private void logFailRowAndIncreaseCounter(Context context, Counters counter, Result row) { context.getCounter(counter).increment(1); context.getCounter(Counters.BADROWS).increment(1); LOG.error(counter.toString() + ", rowkey=" + Bytes.toString(row.getRow())); } @Override protected void cleanup(Context context) { if (replicatedScanner != null) { try { while (currentCompareRowInPeerTable != null) { logFailRowAndIncreaseCounter(context, Counters.ONLY_IN_PEER_TABLE_ROWS, currentCompareRowInPeerTable); currentCompareRowInPeerTable = replicatedScanner.next(); } } catch (Exception e) { LOG.error("fail to scan peer table in cleanup", e); } finally { replicatedScanner.close(); replicatedScanner = null; } } 
} } private static String getPeerQuorumAddress(final Configuration conf) throws IOException { ZooKeeperWatcher localZKW = null; ReplicationPeerZKImpl peer = null; try { localZKW = new ZooKeeperWatcher(conf, "VerifyReplication", new Abortable() { @Override public void abort(String why, Throwable e) {} @Override public boolean isAborted() {return false;} }); ReplicationPeers rp = ReplicationFactory.getReplicationPeers(localZKW, conf, localZKW); rp.init(); Pair<ReplicationPeerConfig, Configuration> pair = rp.getPeerConf(peerId); if (pair == null) { throw new IOException("Couldn't get peer conf!"); } Configuration peerConf = rp.getPeerConf(peerId).getSecond(); return ZKUtil.getZooKeeperClusterKey(peerConf); } catch (ReplicationException e) { throw new IOException( "An error occured while trying to connect to the remove peer cluster", e); } finally { if (peer != null) { peer.close(); } if (localZKW != null) { localZKW.close(); } } } /** * Sets up the actual job. * * @param conf The current configuration. * @param args The command line parameters. * @return The newly created job. * @throws java.io.IOException When setting up the job fails. 
*/ public static Job createSubmittableJob(Configuration conf, String[] args) throws IOException { if (!doCommandLine(args)) { return null; } if (!conf.getBoolean(HConstants.REPLICATION_ENABLE_KEY, HConstants.REPLICATION_ENABLE_DEFAULT)) { throw new IOException("Replication needs to be enabled to verify it."); } conf.set(NAME+".peerId", peerId); conf.set(NAME+".tableName", tableName); conf.setLong(NAME+".startTime", startTime); conf.setLong(NAME+".endTime", endTime); if (families != null) { conf.set(NAME+".families", families); } String peerQuorumAddress = getPeerQuorumAddress(conf); conf.set(NAME + ".peerQuorumAddress", peerQuorumAddress); LOG.info("Peer Quorum Address: " + peerQuorumAddress); Job job = new Job(conf, NAME + "_" + tableName); job.setJarByClass(VerifyReplication.class); Scan scan = new Scan(); scan.setTimeRange(startTime, endTime); if (versions >= 0) { scan.setMaxVersions(versions); } if(families != null) { String[] fams = families.split(","); for(String fam : fams) { scan.addFamily(Bytes.toBytes(fam)); } } TableMapReduceUtil.initTableMapperJob(tableName, scan, Verifier.class, null, null, job); // Obtain the auth token from peer cluster TableMapReduceUtil.initCredentialsForCluster(job, peerQuorumAddress); job.setOutputFormatClass(NullOutputFormat.class); job.setNumReduceTasks(0); return job; } private static boolean doCommandLine(final String[] args) { if (args.length < 2) { printUsage(null); return false; } try { for (int i = 0; i < args.length; i++) { String cmd = args[i]; if (cmd.equals("-h") || cmd.startsWith("--h")) { printUsage(null); return false; } final String startTimeArgKey = "--starttime="; if (cmd.startsWith(startTimeArgKey)) { startTime = Long.parseLong(cmd.substring(startTimeArgKey.length())); continue; } final String endTimeArgKey = "--endtime="; if (cmd.startsWith(endTimeArgKey)) { endTime = Long.parseLong(cmd.substring(endTimeArgKey.length())); continue; } final String versionsArgKey = "--versions="; if 
(cmd.startsWith(versionsArgKey)) { versions = Integer.parseInt(cmd.substring(versionsArgKey.length())); continue; } final String familiesArgKey = "--families="; if (cmd.startsWith(familiesArgKey)) { families = cmd.substring(familiesArgKey.length()); continue; } if (i == args.length-2) { peerId = cmd; } if (i == args.length-1) { tableName = cmd; } } } catch (Exception e) { e.printStackTrace(); printUsage("Can't start because " + e.getMessage()); return false; } return true; } /* * @param errorMsg Error message. Can be null. */ private static void printUsage(final String errorMsg) { if (errorMsg != null && errorMsg.length() > 0) { System.err.println("ERROR: " + errorMsg); } System.err.println("Usage: verifyrep [--starttime=X]" + " [--stoptime=Y] [--families=A] <peerid> <tablename>"); System.err.println(); System.err.println("Options:"); System.err.println(" starttime beginning of the time range"); System.err.println(" without endtime means from starttime to forever"); System.err.println(" endtime end of the time range"); System.err.println(" versions number of cell versions to verify"); System.err.println(" families comma-separated list of families to copy"); System.err.println(); System.err.println("Args:"); System.err.println(" peerid Id of the peer used for verification, must match the one given for replication"); System.err.println(" tablename Name of the table to verify"); System.err.println(); System.err.println("Examples:"); System.err.println(" To verify the data replicated from TestTable for a 1 hour window with peer #5 "); System.err.println(" $ bin/hbase " + "org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication" + " --starttime=1265875194289 --endtime=1265878794289 5 TestTable "); } @Override public int run(String[] args) throws Exception { Configuration conf = this.getConf(); Job job = createSubmittableJob(conf, args); if (job != null) { return job.waitForCompletion(true) ? 0 : 1; } return 1; } /** * Main entry point. 
* * @param args The command line parameters. * @throws Exception When running the job fails. */ public static void main(String[] args) throws Exception { int res = ToolRunner.run(HBaseConfiguration.create(), new VerifyReplication(), args); System.exit(res); } }
apache-2.0
vik0803/hd.rustem
inc/new.php
15273
<?php session_start(); include_once("../functions.inc.php"); $CONF['title_header']=lang('NEW_title')." - ".$CONF['name_of_firm']; if (validate_user($_SESSION['helpdesk_user_id'], $_SESSION['code'])) { if ($_SESSION['helpdesk_user_id']) { include("head.inc.php"); include("navbar.inc.php"); //check_unlinked_file(); ?> <div class="container" id="form_add"> <input type="hidden" id="main_last_new_ticket" value="<?=get_last_ticket_new($_SESSION['helpdesk_user_id']);?>"> <div class="row" style="padding-bottom:20px;"> <div class="col-md-8"> <center><h3><i class="fa fa-tag"></i> <?=lang('NEW_title');?></h3></center></div> </div> <div class="row" style="padding-bottom:20px;"> <div class="col-md-8" id="div_new"> <?php if (isset($_GET['ok'])) { if (isset($_GET['h'])) {$h=$_GET['h'];} ?> <div class="alert alert-success alert-dismissable"> <button type="button" class="close" data-dismiss="alert" aria-hidden="true">&times;</button> <strong><i class="fa fa-check"></i> <?=lang('NEW_ok');?></strong> <?=lang('NEW_ok_1');?> <a class="alert-link" href="<?=$CONF['hostname']?>ticket?<?=$h;?>"><?=lang('NEW_ok_2');?></a> <?=lang('NEW_ok_3');?> <a class="alert-link" href="<?=$CONF['hostname']?>print_ticket?<?=$h;?>"target="_blank"> <?=lang('NEW_ok_4');?></a>. 
</div> <?php } ?> <div class="panel panel-success" style="padding:20px;"> <div class="panel-body"> <div class="form-horizontal" id="main_form" novalidate="" action="" method="post"> <div class="control-group"> <div class="controls"> <div class="form-group" id="for_fio"> <label for="fio" class="col-sm-2 control-label" data-toggle="tooltip" data-placement="top" title="<?=lang('NEW_from_desc');?>"><small><?=lang('NEW_from');?>: </small></label> <div class="col-sm-10"> <input type="text" name="fio" class="form-control input-sm" id="fio" placeholder="<?=lang('NEW_fio');?>" autofocus data-toggle="popover" data-trigger="manual" data-html="true" data-placement="right" data-content="<small><?=lang('NEW_fio_desc');?></small>"> </div> </div></div> <hr> <div class="form-group" id="for_to" data-toggle="popover" data-html="true" data-trigger="manual" data-placement="right"> <label for="to" class="col-md-2 control-label" data-toggle="tooltip" data-placement="top" title="<?=lang('NEW_to_desc');?>"><small><?=lang('NEW_to');?>: </small></label> <div class="col-md-6"> <select data-placeholder="<?=lang('NEW_to_unit');?>" class="chosen-select form-control" id="to" name="unit_id"> <option value="0"></option> <?php /*$qstring = "SELECT name as label, id as value FROM deps where id !='0' ;"; $result = mysql_query($qstring);//query the database for entries containing the while ($row = mysql_fetch_array($result,MYSQL_ASSOC)) { */ $stmt = $dbConnection->prepare('SELECT name as label, id as value FROM deps where id !=:n AND status=:s'); $stmt->execute(array(':n'=>'0',':s'=>'1')); $res1 = $stmt->fetchAll(); foreach($res1 as $row) { //echo($row['label']); $row['label']=$row['label']; $row['value']=(int)$row['value']; ?> <option value="<?=$row['value']?>"><?=$row['label']?></option> <?php } ?> </select> </div> <div class="col-md-4" style="" id="dsd" data-toggle="popover" data-html="true" data-trigger="manual" data-placement="right" data-content="<small><?=lang('NEW_to_unit_desc');?></small>"> 
<select data-placeholder="<?=lang('NEW_to_user');?>" id="users_do" name="unit_id"> <option></option> <?php /* $qstring = "SELECT fio as label, id as value FROM users where status='1' and login !='system' order by fio ASC;"; $result = mysql_query($qstring);//query the database for entries containing the term while ($row = mysql_fetch_array($result,MYSQL_ASSOC)){ */ $stmt = $dbConnection->prepare('SELECT fio as label, id as value FROM users where status=:n and login !=:system order by fio ASC'); $stmt->execute(array(':n'=>'1',':system'=>'system')); $res1 = $stmt->fetchAll(); foreach($res1 as $row) { //echo($row['label']); $row['label']=$row['label']; $row['value']=(int)$row['value']; if (get_user_status_text($row['value']) == "online") {$s="status-online-icon";} else if (get_user_status_text($row['value']) == "offline") {$s="status-offline-icon";} ?> <option data-foo="<?=$s;?>" value="<?=$row['value']?>"><?=nameshort($row['label'])?> </option> <?php } ?> </select> </div> </div> </div> <div class="control-group" id="for_prio"> <div class="controls"> <div class="form-group"> <label for="" class="col-sm-2 control-label"><small><?=lang('NEW_prio');?>: </small></label> <div class="col-sm-10" style=" padding-top: 5px; "> <div class="btn-group btn-group-justified"> <div class="btn-group"> <button type="button" class="btn btn-primary btn-xs" id="prio_low"><i id="lprio_low" class=""></i><?=lang('NEW_prio_low');?></button> </div> <div class="btn-group"> <button type="button" class="btn btn-info btn-xs active" id="prio_normal"><i id="lprio_norm" class="fa fa-check"></i> <?=lang('NEW_prio_norm');?></button> </div> <div class="btn-group"> <button type="button" class="btn btn-danger btn-xs" data-toggle="tooltip" data-placement="top" title="<?=lang('NEW_prio_high_desc');?>" id="prio_high"><i id="lprio_high" class=""></i><?=lang('NEW_prio_high');?></button> </div> </div> </div></div></div></div> <?php /* */ if ($CONF['fix_subj'] == "false") { ?> <div class="control-group" 
id="for_subj"> <div class="controls"> <div class="form-group"> <label for="subj" class="col-sm-2 control-label"><small><?=lang('NEW_subj');?>: </small></label> <div class="col-sm-10"> <input type="text" class="form-control input-sm" name="subj" id="subj" placeholder="<?=lang('NEW_subj');?>" data-toggle="popover" data-html="true" data-trigger="manual" data-placement="right" data-content="<small><?=lang('NEW_subj_msg');?></small>"> </div> </div></div></div> <?php } else if ($CONF['fix_subj'] == "true") { ?> <div class="control-group" id="for_subj" data-toggle="popover" data-html="true" data-trigger="manual" data-placement="right" data-content="<small><?=lang('NEW_subj_msg');?></small>"> <div class="controls"> <div class="form-group"> <label for="subj" class="col-sm-2 control-label"><small><?=lang('NEW_subj');?>: </small></label> <div class="col-sm-10" style=""> <select data-placeholder="<?=lang('NEW_subj_det');?>" class="chosen-select form-control input-sm" id="subj" name="subj"> <option value="0"></option> <?php /*$qstring = "SELECT name FROM subj order by name COLLATE utf8_unicode_ci ASC"; $result = mysql_query($qstring);//query the database for entries containing the term while ($row = mysql_fetch_array($result,MYSQL_ASSOC)) { */ $stmt = $dbConnection->prepare('SELECT name FROM subj order by name COLLATE utf8_unicode_ci ASC'); $stmt->execute(); $res1 = $stmt->fetchAll(); foreach($res1 as $row) { ?> <option value="<?=$row['name']?>"><?=$row['name']?></option> <?php } ?> </select> </div> </div> </div> </div> <?php } ?> <div class="control-group"> <div class="controls"> <div class="form-group" id="for_msg"> <label for="msg" class="col-sm-2 control-label"><small><?=lang('NEW_MSG');?>:</small></label> <div class="col-sm-10"> <textarea data-toggle="popover" data-html="true" data-trigger="manual" data-placement="right" data-content="<small><?=lang('NEW_MSG_msg');?></small>" placeholder="<?=lang('NEW_MSG_ph');?>" class="form-control input-sm animated" name="msg" id="msg" 
rows="3" required="" data-validation-required-message="Укажите сообщение" aria-invalid="false"></textarea> </div> </div> <div class="help-block"></div></div></div> <?php if ($CONF['file_uploads'] == "true") { ?> <div class="control-group"> <div class="controls"> <div class="form-group"> <label for="" class="col-sm-2 control-label"><small><?=lang('TICKET_file_add');?>:</small></label> <div class="col-sm-10"> <form id="fileupload" action="" method="POST" enctype="multipart/form-data"> <div class="fileupload-buttonbar"> <div class=""> <!-- The fileinput-button span is used to style the file input field as button --> <span class="btn btn-success fileinput-button btn-xs"> <i class="glyphicon glyphicon-plus"></i> <span><?=lang('TICKET_file_upload')?></span> <input id="filer" type="file" name="files[]" multiple> </span> <!--button data-toggle="popover" data-html="true" data-trigger="manual" data-placement="top" data-content="<small><?=lang('upload_not_u')?></small>" type="submit" class="btn btn-primary start btn-xs" id="start_upload"> <i class="glyphicon glyphicon-upload"></i> <span><?=lang('TICKET_file_startupload');?></span> </button> <button type="reset" class="btn btn-warning cancel btn-xs"> <i class="glyphicon glyphicon-ban-circle"></i> <span><?=lang('TICKET_file_notupload')?></span> </button--><br> <small class="text-muted"><?=lang('TICKET_file_upload_msg');?></small> <!-- The global file processing state --> <span class="fileupload-process"></span> </div> </div> <!-- The table listing the files available for upload/download --> <table role="presentation" class="table table-striped"><tbody class="files"></tbody></table> </form> </div> </div> </div> </div> <?php } ?> <div class="col-md-2"></div> <div class="col-md-10" id="processing"> <div class="btn-group btn-group-justified"> <div class="btn-group"> <button id="enter_ticket" class="btn btn-success" type="button"><i class="fa fa-check-circle-o"></i> <?=lang('NEW_button_create');?></button> </div> <div 
class="btn-group"> <button id="reset_ticket" class="btn btn-default" type="submit"><i class="fa fa-eraser"></i> <?=lang('NEW_button_reset');?></button> </div> </div> <input type="hidden" id="file_array" value=""> <input type="hidden" id="client_id_param" value=""> <input type="hidden" id="hashname" value="<?=md5(time());?>"> <input type="hidden" id="status_action" value=""> <input type="hidden" id="prio" value="1"> <input type="hidden" value="<?php echo $_SESSION['helpdesk_user_id']; ?>" id="user_init_id"> <input type="hidden" id="file_types" value="<?=$CONF['file_types']?>"> <input type="hidden" id="file_size" value="<?=$CONF['file_size']?>"> </div> </div> </div> </div> <br> </div> <div class="col-md-4"> <div class="panel panel-success" id="user_info" style="display: block;"> </div> <div id="alert_add"> </div> </div> </div> </div> </div> <?php include("footer.inc.php"); ?> <script id="template-upload" type="text/x-tmpl"> {% for (var i=0, file; file=o.files[i]; i++) { %} <tr class="template-upload fade" id="up_entry"> <td> <span class="preview"></span> </td> <td> <p class="name"> {% if (file.name.length>20) { %} {%=file.name.substr(0,10) %}...{%=file.name.substr(-5) %} {% } %} {% if (file.name.length<20) { %} {%=file.name%} {% } %} </p> <strong class="error text-danger"></strong> </td> <td> <p class="size">Processing...</p> <div class="progress progress-striped active" role="progressbar" aria-valuemin="0" aria-valuemax="100" aria-valuenow="0"><div class="progress-bar progress-bar-success" style="width:0%;"></div></div> </td> <td> {% if (!i && !o.options.autoUpload) { %} <button id="s_start" class="btn btn-primary start btn-xs" disabled><i class="glyphicon glyphicon-upload"></i> <?=lang('TICKET_file_startupload');?> </button> {% } %} {% if (!i) { %} <button class="btn btn-warning cancel btn-xs"> <i class="glyphicon glyphicon-ban-circle"></i> <span><?=lang('TICKET_file_notupload_one');?></span> </button> {% } %} </td> </tr> {% } %} </script> <!-- The template to 
display files available for download --> <script id="template-download" type="text/x-tmpl"> {% for (var i=0, file; file=o.files[i]; i++) { %} <tr class="template-download fade"> <td> <span class="preview"> {% if (file.thumbnailUrl) { %} <a href="{%=file.url%}" title="{%=file.name%}" download="{%=file.name%}" data-gallery><img src="{%=file.thumbnailUrl%}"></a> {% } %} </span> </td> <td> <p class="name"> {% if (file.name2.length>30) { %} <?=lang('file_info');?>: {%=file.name2.substr(0,30) %}...{%=file.name2.substr(-5) %} - <?=lang('file_info2');?> {% } %} {% if (file.name2.length<30) { %} <?=lang('file_info');?>: {%=file.name2%} - <?=lang('file_info2');?> {% } %} </p> {% if (file.error) { %} <div><span class="label label-danger">Error</span> {%=file.error%}</div> {% } %} </td> <td> <span class="size">{%=o.formatFileSize(file.size)%}</span> </td> <td> <p class="name"> <span class="label label-success"><i class="fa fa-check"></i> ok</span> </p> </td> </tr> {% } %} </script> <?php } } else { include 'auth.php'; } ?>
apache-2.0
ldts/zephyr
boards/arm/stm32f072_eval/pinmux.c
708
/*
 * Copyright (c) 2017 BayLibre, SAS
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <kernel.h>
#include <device.h>
#include <init.h>
#include <pinmux.h>
#include <sys_io.h>

#include "pinmux/stm32/pinmux_stm32.h"

/* Pin assignments for the STM32F072-EVAL board. */
static const struct pin_config pinconf[] = {
#ifdef CONFIG_UART_2
	/* USART2 console: PD5 is TX, PD6 is RX. */
	{STM32_PIN_PD5, STM32F0_PINMUX_FUNC_PD5_USART2_TX},
	{STM32_PIN_PD6, STM32F0_PINMUX_FUNC_PD6_USART2_RX},
#endif	/* CONFIG_UART_2 */
};

/*
 * Board pinmux initializer: routes each entry in pinconf to its
 * alternate function. Registered below to run once at PRE_KERNEL_1.
 */
static int pinmux_stm32_init(struct device *dev)
{
	ARG_UNUSED(dev);

	stm32_setup_pins(pinconf, ARRAY_SIZE(pinconf));

	return 0;
}

SYS_INIT(pinmux_stm32_init, PRE_KERNEL_1,
	 CONFIG_PINMUX_STM32_DEVICE_INITIALIZATION_PRIORITY);
apache-2.0
intentionet/batfish
projects/question/src/test/java/org/batfish/question/namedstructures/NamedStructuresAnswererTest.java
9212
package org.batfish.question.namedstructures;

import static org.batfish.question.namedstructures.NamedStructuresAnswerer.getAllStructureNamesOfType;
import static org.batfish.question.namedstructures.NamedStructuresAnswerer.insertedObject;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;

import com.google.common.collect.HashMultiset;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multiset;
import java.util.Map;
import org.batfish.datamodel.Configuration;
import org.batfish.datamodel.ConfigurationFormat;
import org.batfish.datamodel.NetworkFactory;
import org.batfish.datamodel.pojo.Node;
import org.batfish.datamodel.questions.NamedStructurePropertySpecifier;
import org.batfish.datamodel.routing_policy.RoutingPolicy;
import org.batfish.datamodel.table.Row;
import org.junit.Test;

/** Tests of {@link NamedStructuresAnswerer}. */
public class NamedStructuresAnswererTest {

  private static final String ALL_NODES = ".*";

  @Test
  public void testGetAllStructureNamesOfType() {
    NetworkFactory nf = new NetworkFactory();

    // c1 has both routing policies
    Configuration c1 =
        nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS).build();
    nf.routingPolicyBuilder().setOwner(c1).setName("rp1").build();
    nf.routingPolicyBuilder().setOwner(c1).setName("rp2").build();

    // c2 has only one routing policy.
    // Fixed: the policy must be owned by c2 (was setOwner(c1)); otherwise c2 has
    // no structures at all and the cross-node union below is never exercised.
    Configuration c2 =
        nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS).build();
    nf.routingPolicyBuilder().setOwner(c2).setName("rp1").build();

    Map<String, Configuration> configurations = ImmutableMap.of("node1", c1, "node2", c2);

    // both policies should be returned
    assertThat(
        getAllStructureNamesOfType(
            NamedStructurePropertySpecifier.ROUTING_POLICY,
            configurations.keySet(),
            configurations),
        equalTo(ImmutableSet.of("rp1", "rp2")));
  }

  @Test
  public void testRawAnswerDefinition() {
    NetworkFactory nf = new NetworkFactory();
    Configuration c =
        nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS).build();
    RoutingPolicy rp1 = nf.routingPolicyBuilder().setOwner(c).setName("rp1").build();
    RoutingPolicy rp2 = nf.routingPolicyBuilder().setOwner(c).setName("rp2").build();
    nf.vrfBuilder().setOwner(c).build();

    Map<String, Configuration> configurations = ImmutableMap.of("node1", c);

    // only get routing policies
    NamedStructuresQuestion question =
        new NamedStructuresQuestion(
            ALL_NODES, NamedStructurePropertySpecifier.ROUTING_POLICY, null, null, false);

    Multiset<Row> rows =
        NamedStructuresAnswerer.rawAnswer(
            question,
            configurations.keySet(),
            configurations,
            NamedStructuresAnswerer.createMetadata(question).toColumnMap());

    Multiset<Row> expected =
        HashMultiset.create(
            ImmutableList.of(
                Row.builder()
                    .put(NamedStructuresAnswerer.COL_NODE, new Node("node1"))
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_TYPE,
                        NamedStructurePropertySpecifier.ROUTING_POLICY)
                    .put(NamedStructuresAnswerer.COL_STRUCTURE_NAME, "rp1")
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_DEFINITION,
                        insertedObject(rp1, NamedStructurePropertySpecifier.ROUTING_POLICY))
                    .build(),
                Row.builder()
                    .put(NamedStructuresAnswerer.COL_NODE, new Node("node1"))
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_TYPE,
                        NamedStructurePropertySpecifier.ROUTING_POLICY)
                    .put(NamedStructuresAnswerer.COL_STRUCTURE_NAME, "rp2")
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_DEFINITION,
                        insertedObject(rp2, NamedStructurePropertySpecifier.ROUTING_POLICY))
                    .build()));

    assertThat(rows, equalTo(expected));
  }

  @Test
  public void testRawAnswerIgnoreGenerated() {
    NetworkFactory nf = new NetworkFactory();
    Configuration c =
        nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS).build();
    RoutingPolicy rp1 = nf.routingPolicyBuilder().setOwner(c).setName("rp1").build();
    // "~rp2" is a generated structure name and should be filtered out.
    nf.routingPolicyBuilder().setOwner(c).setName("~rp2").build();

    Map<String, Configuration> configurations = ImmutableMap.of("node1", c);

    NamedStructuresQuestion question =
        new NamedStructuresQuestion(ALL_NODES, "/.*/", null, true, null);

    Multiset<Row> rows =
        NamedStructuresAnswerer.rawAnswer(
            question,
            configurations.keySet(),
            configurations,
            NamedStructuresAnswerer.createMetadata(question).toColumnMap());

    Multiset<Row> expected =
        HashMultiset.create(
            ImmutableList.of(
                Row.builder()
                    .put(NamedStructuresAnswerer.COL_NODE, new Node("node1"))
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_TYPE,
                        NamedStructurePropertySpecifier.ROUTING_POLICY)
                    .put(NamedStructuresAnswerer.COL_STRUCTURE_NAME, "rp1")
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_DEFINITION,
                        insertedObject(rp1, NamedStructurePropertySpecifier.ROUTING_POLICY))
                    .build()));

    assertThat(rows, equalTo(expected));
  }

  @Test
  public void testRawAnswerPresence() {
    NetworkFactory nf = new NetworkFactory();
    Configuration c1 =
        nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS).build();
    nf.routingPolicyBuilder().setOwner(c1).setName("rp1").build();
    Configuration c2 =
        nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS).build();

    Map<String, Configuration> configurations = ImmutableMap.of("node1", c1, "node2", c2);

    NamedStructuresQuestion question =
        new NamedStructuresQuestion(ALL_NODES, "/.*/", null, null, true);

    Multiset<Row> rows =
        NamedStructuresAnswerer.rawAnswer(
            question,
            configurations.keySet(),
            configurations,
            NamedStructuresAnswerer.createMetadata(question).toColumnMap());

    // rp1 is present on node1 and absent on node2.
    Multiset<Row> expected =
        HashMultiset.create(
            ImmutableList.of(
                Row.builder()
                    .put(NamedStructuresAnswerer.COL_NODE, new Node("node1"))
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_TYPE,
                        NamedStructurePropertySpecifier.ROUTING_POLICY)
                    .put(NamedStructuresAnswerer.COL_STRUCTURE_NAME, "rp1")
                    .put(NamedStructuresAnswerer.COL_PRESENT_ON_NODE, true)
                    .build(),
                Row.builder()
                    .put(NamedStructuresAnswerer.COL_NODE, new Node("node2"))
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_TYPE,
                        NamedStructurePropertySpecifier.ROUTING_POLICY)
                    .put(NamedStructuresAnswerer.COL_STRUCTURE_NAME, "rp1")
                    .put(NamedStructuresAnswerer.COL_PRESENT_ON_NODE, false)
                    .build()));

    assertThat(rows, equalTo(expected));
  }

  @Test
  public void testRawAnswerStructureNameRegex() {
    NetworkFactory nf = new NetworkFactory();
    Configuration c =
        nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS).build();
    RoutingPolicy rp1 = nf.routingPolicyBuilder().setOwner(c).setName("selected-rp1").build();
    nf.routingPolicyBuilder().setOwner(c).setName("leftout-rp2").build();

    Map<String, Configuration> configurations = ImmutableMap.of("node1", c);

    NamedStructuresQuestion question =
        new NamedStructuresQuestion(ALL_NODES, "/.*/", "selected.*", false, null);

    Multiset<Row> rows =
        NamedStructuresAnswerer.rawAnswer(
            question,
            configurations.keySet(),
            configurations,
            NamedStructuresAnswerer.createMetadata(question).toColumnMap());

    Multiset<Row> expected =
        HashMultiset.create(
            ImmutableList.of(
                Row.builder()
                    .put(NamedStructuresAnswerer.COL_NODE, new Node("node1"))
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_TYPE,
                        NamedStructurePropertySpecifier.ROUTING_POLICY)
                    .put(NamedStructuresAnswerer.COL_STRUCTURE_NAME, "selected-rp1")
                    .put(
                        NamedStructuresAnswerer.COL_STRUCTURE_DEFINITION,
                        insertedObject(rp1, NamedStructurePropertySpecifier.ROUTING_POLICY))
                    .build()));

    assertThat(rows, equalTo(expected));
  }
}
apache-2.0
fschueler/incubator-systemml
src/main/java/org/apache/sysml/runtime/controlprogram/parfor/opt/OptNodeStatistics.java
2875
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.runtime.controlprogram.parfor.opt; import org.apache.sysml.runtime.controlprogram.parfor.opt.PerfTestTool.DataFormat; /** * * TODO extend to right as well (see PerfTestTool, currently only trained with regard to left) * TODO integrate mem and exec time for reuse? * */ public class OptNodeStatistics { public static final long DEFAULT_DIMENSION = 100; public static final double DEFAULT_SPARSITY = 1.0; public static final DataFormat DEFAULT_DATAFORMAT = DataFormat.DENSE; //operation characteristics private long _dim1 = -1; //rows left private long _dim2 = -1; //cols left private long _dim3 = -1; //rows right private long _dim4 = -1; //cols right private double _sparsity = -1; //sparsity left private DataFormat _df = null; //data format left /** * Default constructor, sets all internal statistics to their respective default values. 
*/ public OptNodeStatistics( ) { _dim1 = DEFAULT_DIMENSION; _dim2 = DEFAULT_DIMENSION; _dim3 = DEFAULT_DIMENSION; _dim4 = DEFAULT_DIMENSION; _sparsity = DEFAULT_SPARSITY; _df = DEFAULT_DATAFORMAT; } public OptNodeStatistics( long dim1, long dim2, long dim3, long dim4, double sparsity, DataFormat df ) { _dim1 = dim1; _dim2 = dim2; _dim3 = dim3; _dim4 = dim4; _sparsity = sparsity; _df = df; } public long getDim1() { return _dim1; } public void setDim1(long dim1) { _dim1 = dim1; } public long getDim2() { return _dim2; } public void setDim2(long dim2) { _dim2 = dim2; } public long getDim3() { return _dim3; } public void setDim3(long dim3) { _dim3 = dim3; } public long getDim4() { return _dim4; } public void setDim4(long dim4) { _dim4 = dim4; } public double getSparsity() { return _sparsity; } public void setSparsity(double sparsity) { _sparsity = sparsity; } public DataFormat getDataFormat() { return _df; } public void setDataFormat(DataFormat df) { _df = df; } }
apache-2.0
progsung/cesium
Source/Core/Spline.js
4935
import Check from "./Check.js";
import defaultValue from "./defaultValue.js";
import defined from "./defined.js";
import DeveloperError from "./DeveloperError.js";
import CesiumMath from "./Math.js";

/**
 * Creates a curve parameterized and evaluated by time. This type describes an interface
 * and is not intended to be instantiated directly.
 *
 * @alias Spline
 * @constructor
 *
 * @see CatmullRomSpline
 * @see HermiteSpline
 * @see LinearSpline
 * @see QuaternionSpline
 */
function Spline() {
  /**
   * An array of times for the control points.
   * @type {Number[]}
   * @default undefined
   */
  this.times = undefined;

  /**
   * An array of control points.
   * @type {Cartesian3[]|Quaternion[]}
   * @default undefined
   */
  this.points = undefined;

  DeveloperError.throwInstantiationError();
}

/**
 * Evaluates the curve at a given time.
 * @function
 *
 * @param {Number} time The time at which to evaluate the curve.
 * @param {Cartesian3|Quaternion|Number[]} [result] The object onto which to store the result.
 * @returns {Cartesian3|Quaternion|Number[]} The modified result parameter or a new instance of the point on the curve at the given time.
 *
 * @exception {DeveloperError} time must be in the range <code>[t<sub>0</sub>, t<sub>n</sub>]</code>, where <code>t<sub>0</sub></code>
 * is the first element in the array <code>times</code> and <code>t<sub>n</sub></code> is the last element
 * in the array <code>times</code>.
 */
Spline.prototype.evaluate = DeveloperError.throwInstantiationError;

/**
 * Finds an index <code>i</code> in <code>times</code> such that the parameter
 * <code>time</code> is in the interval <code>[times[i], times[i + 1]]</code>.
 *
 * @param {Number} time The time.
 * @param {Number} startIndex The index from which to start the search.
 * @returns {Number} The index for the element at the start of the interval.
 *
 * @exception {DeveloperError} time must be in the range <code>[t<sub>0</sub>, t<sub>n</sub>]</code>, where <code>t<sub>0</sub></code>
 * is the first element in the array <code>times</code> and <code>t<sub>n</sub></code> is the last element
 * in the array <code>times</code>.
 */
Spline.prototype.findTimeInterval = function (time, startIndex) {
  var knots = this.times;
  var count = knots.length;

  //>>includeStart('debug', pragmas.debug);
  if (!defined(time)) {
    throw new DeveloperError("time is required.");
  }
  if (time < knots[0] || time > knots[count - 1]) {
    throw new DeveloperError("time is out of range.");
  }
  //>>includeEnd('debug');

  // Exploit temporal coherence: most queries fall into the current,
  // next, or previous interval, so probe those before searching.
  startIndex = defaultValue(startIndex, 0);
  if (time >= knots[startIndex]) {
    if (startIndex + 1 < count && time < knots[startIndex + 1]) {
      return startIndex;
    } else if (startIndex + 2 < count && time < knots[startIndex + 2]) {
      return startIndex + 1;
    }
  } else if (startIndex - 1 >= 0 && time >= knots[startIndex - 1]) {
    return startIndex - 1;
  }

  // Fall back to a linear scan, forward or backward from startIndex
  // depending on which side of it the query time lies. Knot lists seen
  // in practice are short (< 10), so this has not been a bottleneck.
  var index;
  if (time > knots[startIndex]) {
    for (index = startIndex; index < count - 1; ++index) {
      if (time >= knots[index] && time < knots[index + 1]) {
        break;
      }
    }
  } else {
    for (index = startIndex - 1; index >= 0; --index) {
      if (time >= knots[index] && time < knots[index + 1]) {
        break;
      }
    }
  }

  // A query at exactly the last knot maps onto the final interval.
  if (index === count - 1) {
    index = count - 2;
  }

  return index;
};

/**
 * Wraps the given time to the period covered by the spline.
 * @function
 *
 * @param {Number} time The time.
 * @return {Number} The time, wrapped around the animation period.
 */
Spline.prototype.wrapTime = function (time) {
  //>>includeStart('debug', pragmas.debug);
  Check.typeOf.number("time", time);
  //>>includeEnd('debug');

  var knots = this.times;
  var first = knots[0];
  var last = knots[knots.length - 1];
  var period = last - first;

  // Shift the time by whole periods until it lands inside [first, last].
  var cycles;
  if (time < first) {
    cycles = Math.floor((first - time) / period) + 1;
    time += cycles * period;
  }
  if (time > last) {
    cycles = Math.floor((time - last) / period) + 1;
    time -= cycles * period;
  }
  return time;
};

/**
 * Clamps the given time to the period covered by the spline.
 * @function
 *
 * @param {Number} time The time.
 * @return {Number} The time, clamped to the animation period.
 */
Spline.prototype.clampTime = function (time) {
  //>>includeStart('debug', pragmas.debug);
  Check.typeOf.number("time", time);
  //>>includeEnd('debug');

  var knots = this.times;
  return CesiumMath.clamp(time, knots[0], knots[knots.length - 1]);
};
export default Spline;
apache-2.0
tequalsme/nifi
nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/resources/docs/org.apache.nifi.processors.standard.EncryptContent/additionalDetails.html
1454
<!DOCTYPE html> <html lang="en"> <!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <head> <meta charset="utf-8"/> <title>EncryptContent</title> <link rel="stylesheet" href="/nifi-docs/css/component-usage.css" type="text/css"/> </head> <body> <!-- Processor Documentation ================================================== --> <p> <strong>Note:</strong> This processor supports OpenPGP algorithms that are compatible with third party programs. However, it currently cannot add a digital signature to an encrypted FlowFile. </p> </body> </html>
apache-2.0
dublinio/smile
Smile/src/main/java/smile/feature/Nominal2Binary.java
3646
/******************************************************************************* * Copyright (c) 2010 Haifeng Li * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package smile.feature; import smile.data.Attribute; import smile.data.NominalAttribute; import smile.data.NumericAttribute; /** * Nominal variable to binary dummy variables feature generator. Although some * method such as decision trees can handle nominal variable directly, other * methods generally require nominal variables converted to multiple binary * dummy variables to indicate the presence or absence of a characteristic. * * @author Haifeng Li */ public class Nominal2Binary implements Feature<double[]> { /** * The variable attributes. */ private Attribute[] attributes; /** * The attributes of generated binary dummy variables. */ private Attribute[] features; /** * A map from feature id to original attribute index. */ private int[] map; /** * A map from feature id to nominal attribute value. */ private int[] value; /** * Constructor. * @param attributes the variable attributes. Of which, nominal variables * will be converted to binary dummy variables. 
*/ public Nominal2Binary(Attribute[] attributes) { this.attributes = attributes; int p = 0; for (Attribute attribute : attributes) { if (attribute instanceof NominalAttribute) { NominalAttribute nominal = (NominalAttribute) attribute; p += nominal.size(); } } features = new Attribute[p]; map = new int[p]; value = new int[p]; for (int i = 0, j = 0; j < attributes.length; j++) { Attribute attribute = attributes[j]; if (attribute instanceof NominalAttribute) { NominalAttribute nominal = (NominalAttribute) attribute; double weight = nominal.weight; String name = nominal.name; String description = nominal.description; for (int k = 0; k < nominal.size(); k++, i++) { features[i] = new NumericAttribute(name + "_" + k, description, weight); map[i] = j; value[i] = k; } } } } @Override public Attribute[] attributes() { return features; } @Override public double f(double[] object, int id) { if (object.length != attributes.length) { throw new IllegalArgumentException(String.format("Invalide object size %d, expected %d", object.length, attributes.length)); } if (id < 0 || id >= features.length) { throw new IllegalArgumentException("Invalide feature id: " + id); } if (object[map[id]] == value[id]) { return 1; } else { return 0; } } }
apache-2.0
ketan/gocd
server/src/main/webapp/WEB-INF/rails/spec/views/shared/_build_cause_html_spec.rb
11695
#
# Copyright 2019 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require 'rails_helper'

describe "/shared/_build_cause.html.erb" do
  include StageModelMother

  before do
    @modification = Modification.new(@date=java.util.Date.new, "1234", "label-1", nil)
    @modification.setUserName("username")
    @modification.setComment("#42 I changed something")
    @modification.setModifiedFiles([ModifiedFile.new("nimmappa/foo.txt", "", ModifiedAction::added),
                                    ModifiedFile.new("nimmappa/bar.txt", "", ModifiedAction::deleted),
                                    ModifiedFile.new("nimmappa/baz.txt", "", ModifiedAction::modified),
                                    ModifiedFile.new("nimmappa/quux.txt", "", ModifiedAction::unknown)])
    @revisions = MaterialRevisions.new([].to_java(MaterialRevision))
    @svn_revisions = ModificationsMother.createMaterialRevisions(MaterialsMother.svnMaterial("url", "Folder", nil, "pass", true, "*.doc"), @modification)
    @svn_revisions.getMaterialRevision(0).markAsChanged()
    @svn_revisions.materials().get(0).setName(CaseInsensitiveString.new("SvnName"))
    @revisions.addAll(@svn_revisions)

    @hg_revisions = ModificationsMother.createHgMaterialRevisions()
    @revisions.addAll(@hg_revisions)

    @dependency_revisions = ModificationsMother.changedDependencyMaterialRevision("up_pipeline", 10, "label-10", "up_stage", 5, Time.now)
    @revisions.addRevision(@dependency_revisions)
    assign :stage, stage_with_three_runs()
  end

  it "should not display modified files if the flag is not set" do
    allow(view).to receive(:go_config_service).and_return(config_service = double('go_config_service'));
    allow(config_service).to receive(:getCommentRendererFor).with("foo").and_return(TrackingTool.new("http://pavan/${ID}", "#(\\d+)"))

    render :partial => "shared/build_cause", :locals => {:scope => {:material_revisions => @revisions, :show_files => false, :pipeline_name => "foo"}}

    Capybara.string(response.body).find(".build_cause #material_#{@svn_revisions.materials().get(0).getPipelineUniqueFingerprint()}.changed").tap do |material|
      expect(material).to have_selector(".material_name", :text => "Subversion - SvnName")
      material.find(".change").tap do |change|
        change.find(".revision").tap do |revision|
          expect(revision).to have_selector("dt", :text => "Revision:")
          expect(revision).to have_selector("dd", :text => "1234")
        end
        change.find(".modified_by").tap do |revision|
          expect(revision).to have_selector("dt", :text => "Modified by:")
          expect(revision).to have_selector("dd", :text => "username on #{@date.iso8601}")
        end
        change.find(".comment").tap do |revision|
          expect(revision).to have_selector("dt", "Comment:")
          expect(revision).to have_selector("dd", :text => "#42 I changed something")
          revision.find("dd").tap do |comment|
            expect(comment).to have_selector("a[href='http://pavan/42'][target='story_tracker']", :text => "#42")
          end
        end
        expect(change).to_not have_selector(".modified_files")
      end
    end

    Capybara.string(response.body).find(".build_cause #material_#{@hg_revisions.materials().get(0).getPipelineUniqueFingerprint()}").tap do |material|
      expect(material).to have_selector(".material_name", :text => "Mercurial - hg-url")
      material.all(".change").tap do |changes|
        change1 = changes[0]
        change2 = changes[1]
        change1.find(".revision").tap do |revision|
          expect(revision).to have_selector("dt", :text => "Revision:")
          expect(revision).to have_selector("dd", :text => "9fdcf27f16eadc362733328dd481d8a2c29915e1")
        end
        change1.find(".modified_by").tap do |revision|
          expect(revision).to have_selector("dt", :text => "Modified by:")
          expect(revision).to have_selector("dd", :text => "user2 on #{ModificationsMother::TODAY_CHECKIN.iso8601}")
        end
        change1.find(".comment").tap do |revision|
          expect(revision).to have_selector("dt", "Comment:")
          expect(revision).to have_selector("dd", :text => "comment2")
        end
        expect(change1).to_not have_selector(".modified_files")

        change2.find(".revision").tap do |revision|
          expect(revision).to have_selector("dt", :text => "Revision:")
          expect(revision).to have_selector("dd", :text => "eef77acd79809fc14ed82b79a312648d4a2801c6")
        end
        change2.find(".modified_by").tap do |revision|
          expect(revision).to have_selector("dt", :text => "Modified by:")
          expect(revision).to have_selector("dd", :text => "user1 on #{ModificationsMother::TWO_DAYS_AGO_CHECKIN.iso8601}")
        end
        change2.find(".comment").tap do |revision|
          expect(revision).to have_selector("dt", "Comment:")
          expect(revision).to have_selector("dd", :text => "comment1")
        end
        expect(change2).to_not have_selector(".modified_files")
      end
    end

    dependency_material = @dependency_revisions.getMaterial()
    Capybara.string(response.body).find(".build_cause #material_#{dependency_material.getPipelineUniqueFingerprint()}.changed").tap do |material|
      expect(material).to have_selector(".material_name", :text => "Pipeline - #{dependency_material.getDisplayName()}")
      material.find(".change").tap do |change|
        change.find(".revision").tap do |revision|
          expect(revision).to have_selector("dt", :text => "Revision:")
          expect(revision).to have_selector("dd a[href='/pipelines/up_pipeline/10/up_stage/5']", :text => "up_pipeline/10/up_stage/5")
        end
        change.find(".label").tap do |label|
          expect(label).to have_selector("dt", :text => "VSM:")
          expect(label).to have_selector("dd a[href='/go/pipelines/value_stream_map/up_pipeline/10']", :text => "label-10")
        end
        change.find(".completed_at").tap do |completed_at|
          expect(completed_at).to have_selector("dt", "Completed at:")
          expect(completed_at).to have_selector("dd", :text => "#{@dependency_revisions.getModification(0).getModifiedTime().iso8601}")
        end
      end
    end
  end

  # NOTE(review): this example existed twice, verbatim except for a stricter
  # ":text =>" matcher in the second copy. The weaker duplicate was removed and
  # the typo "espace" in the description was fixed.
  it "should html escape all the user entered fields" do
    allow(view).to receive(:go_config_service).and_return(config_service = double('go_config_service'));
    allow(config_service).to receive(:getCommentRendererFor).with("foo").and_return(TrackingTool.new("http://pavan/${ID}", "#(\\d+)"))

    @modification.setComment("<script>alert('Check-in comment')</script>")
    @modification.setUserName("<script>alert('Check-in user')</script>")
    @modification.setEmailAddress("<script>alert('Check-in email address')</script>")

    render :partial => "shared/build_cause", :locals => {:scope => {:material_revisions => @revisions, :show_files => false, :pipeline_name => "foo"}}

    Capybara.string(response.body).find(".build_cause #material_#{@svn_revisions.materials().get(0).getPipelineUniqueFingerprint()}.changed").tap do |material|
      expect(material).to have_selector(".material_name", :text => "Subversion - SvnName")
      material.find(".change").tap do |change|
        change.find(".modified_by").tap do |revision|
          expect(revision.find("dd").native.to_s).to include "&lt;script&gt;alert('Check-in user')&lt;/script&gt; on #{@date.iso8601}"
        end
        change.find(".comment").tap do |revision|
          expect(revision.find("dd").native.to_s).to include "&lt;script&gt;alert('Check-in comment')&lt;/script&gt;"
        end
      end
    end
  end

  it "should render comment for package material" do
    modification = Modification.new("user", '{"TYPE":"PACKAGE_MATERIAL","TRACKBACK_URL" : "http://google.com", "COMMENT" : "Some comment."}', "", @date=java.util.Date.new, "12345")
    package_material = MaterialsMother.packageMaterial()
    package_material_revision = MaterialRevision.new(package_material, [modification].to_java(Modification))
    revisions = MaterialRevisions.new([package_material_revision].to_java(MaterialRevision))

    render :partial => "shared/build_cause", :locals => {:scope => {:material_revisions => revisions, :show_files => false, :pipeline_name => "foo"}}

    Capybara.string(response.body).find(".build_cause #material_#{package_material.getPipelineUniqueFingerprint()}").tap do |material|
      expect(material).to have_selector(".material_name", :text => "Package - repo-name_package-name")
      material.find(".change").tap do |change|
        change.find(".modified_by").tap do |revision|
          expect(revision).to have_selector("dd", :text => "user on #{@date.iso8601}")
        end
        change.find(".comment").tap do |revision|
          expect(revision).to have_selector("dd", :text => "Some comment.Trackback: http://google.com")
        end
      end
    end
  end

  it "should render user for display for build cause" do
    modification = Modification.new("", 'some comment', "", @date=java.util.Date.new, "12345")
    material = MaterialsMother.svnMaterial()
    material_revision = MaterialRevision.new(material, [modification].to_java(Modification))
    revisions = MaterialRevisions.new([material_revision].to_java(MaterialRevision))
    allow(view).to receive(:render_comment).with(modification, 'foo').and_return('something')

    render :partial => "shared/build_cause", :locals => {:scope => {:material_revisions => revisions, :show_files => false, :pipeline_name => "foo"}}

    Capybara.string(response.body).find(".build_cause #material_#{material.getPipelineUniqueFingerprint()}").tap do |material|
      expect(material).to have_selector(".material_name", "Subversion - url")
      expect(material).to have_selector(".modified_by dd", "anonymous on #{@date.iso8601}")
    end
  end
end
apache-2.0
18826252059/im
web/bundles/topxiaadmin/js/controller/user/teacher-list.js
539
define(function(require, exports, module) { var Notify = require('common/bootstrap-notify'); exports.run = function() { var $table = $('#teacher-table'); $table.on('click', '.promote-user', function(){ $.post($(this).data('url'),function(response) { window.location.reload(); }); }); $table.on('click', '.cancel-promote-user', function(){ $.post($(this).data('url'),function(response) { window.location.reload(); }); }); }; });
apache-2.0
elkingtonmcb/simbody
SimTKmath/Integrators/src/SemiExplicitEulerIntegratorRep.h
2406
#ifndef SimTK_SIMMATH_SEMI_EXPLICIT_EULER_INTEGRATOR_REP_H_ #define SimTK_SIMMATH_SEMI_EXPLICIT_EULER_INTEGRATOR_REP_H_ /* -------------------------------------------------------------------------- * * Simbody(tm): SimTKmath * * -------------------------------------------------------------------------- * * This is part of the SimTK biosimulation toolkit originating from * * Simbios, the NIH National Center for Physics-Based Simulation of * * Biological Structures at Stanford, funded under the NIH Roadmap for * * Medical Research, grant U54 GM072970. See https://simtk.org/home/simbody. * * * * Portions copyright (c) 2013 Stanford University and the Authors. * * Authors: Michael Sherman * * Contributors: * * * * Licensed under the Apache License, Version 2.0 (the "License"); you may * * not use this file except in compliance with the License. You may obtain a * * copy of the License at http://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * -------------------------------------------------------------------------- */ #include "AbstractIntegratorRep.h" namespace SimTK { class SemiExplicitEulerIntegratorRep : public AbstractIntegratorRep { public: SemiExplicitEulerIntegratorRep(Integrator* handle, const System& sys); protected: bool attemptDAEStep (Real t1, Vector& yErrEst, int& errOrder, int& numIterations) override; void createInterpolatedState(Real t) override; void backUpAdvancedStateByInterpolation(Real t) override; }; } // namespace SimTK #endif // SimTK_SIMMATH_SEMI_EXPLICIT_EULER_INTEGRATOR_REP_H_
apache-2.0
karllessard/tensorflow
tensorflow/compiler/mlir/lite/utils/lstm_utils.cc
34132
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/compiler/mlir/lite/utils/lstm_utils.h" #include "llvm/ADT/ArrayRef.h" #include "llvm/ADT/None.h" #include "llvm/ADT/SmallVector.h" #include "llvm/ADT/StringRef.h" #include "llvm/Support/Casting.h" #include "llvm/Support/raw_ostream.h" #include "mlir/Dialect/StandardOps/IR/Ops.h" // from @llvm-project #include "mlir/IR/Attributes.h" // from @llvm-project #include "mlir/IR/Builders.h" // from @llvm-project #include "mlir/IR/Function.h" // from @llvm-project #include "mlir/IR/Identifier.h" // from @llvm-project #include "mlir/IR/Location.h" // from @llvm-project #include "mlir/IR/MLIRContext.h" // from @llvm-project #include "mlir/IR/OpDefinition.h" // from @llvm-project #include "mlir/IR/Operation.h" // from @llvm-project #include "mlir/IR/StandardTypes.h" // from @llvm-project #include "mlir/IR/Types.h" // from @llvm-project #include "mlir/IR/Value.h" // from @llvm-project #include "mlir/Support/LLVM.h" // from @llvm-project #include "mlir/Support/LogicalResult.h" // from @llvm-project #include "tensorflow/compiler/mlir/lite/ir/tfl_ops.h" #include "tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h" namespace mlir { namespace TFL { namespace { Value CreateI32SplatConst(OpBuilder* builder, ArrayRef<int64_t> shape, int32_t val, mlir::Location location) { auto type = RankedTensorType::get(shape, 
builder->getIntegerType(32)); auto attr = DenseElementsAttr::get(type, val); return builder->create<ConstantOp>(location, type, attr); } Value CreateF32SplatConst(OpBuilder* builder, ArrayRef<int64_t> shape, float val, mlir::Location location) { auto type = RankedTensorType::get(shape, builder->getF32Type()); auto attr = DenseElementsAttr::get(type, val); return builder->create<ConstantOp>(location, type, attr); } Value CreatTfF32ConstOp(OpBuilder* builder, ArrayRef<int64_t> shape, float val, mlir::Location location) { auto type = RankedTensorType::get(shape, builder->getF32Type()); auto ele_type = RankedTensorType::get({1}, builder->getF32Type()); auto attr = DenseElementsAttr::get(ele_type, val); return builder->create<TF::ConstOp>(location, type, attr); } Value CreateI64DenseConst(OpBuilder* builder, ArrayRef<int64_t> shape, ArrayRef<int64_t> values, mlir::Location location) { auto type = RankedTensorType::get(static_cast<int>(shape.size()), builder->getIntegerType(64)); auto attr = DenseElementsAttr::get(type, values); return builder->create<ConstantOp>(location, type, attr); } Value CreateI32DenseConst(OpBuilder* builder, ArrayRef<int32_t> values, mlir::Location location) { auto type = RankedTensorType::get(static_cast<int>(values.size()), builder->getIntegerType(32)); auto attr = DenseElementsAttr::get(type, values); return builder->create<ConstantOp>(location, type, attr); } Value CreateNoneValue(OpBuilder* builder, mlir::Location location) { return builder->create<mlir::ConstantOp>(location, builder->getNoneType(), builder->getUnitAttr()); } Value Transpose(OpBuilder* builder, Value value_to_transpose, SmallVector<int32_t, 4> perm, RankedTensorType original_type, mlir::Location location) { // Create a constant op for transpose permutation. auto perm_op = CreateI32DenseConst(builder, perm, location); // Create tensor type for the transpose result. 
auto transpose_type = original_type; auto transpose_shape = llvm::to_vector<8>(llvm::map_range(perm, [transpose_type](int32_t dim) { return transpose_type.getDimSize(dim); })); auto elem_type = transpose_type.getElementType(); auto result_type = RankedTensorType::get(transpose_shape, elem_type); return builder->create<TF::TransposeOp>(location, result_type, value_to_transpose, perm_op); } Value Transpose2D(OpBuilder* builder, Value value_to_transpose, RankedTensorType type, mlir::Location location) { // Create a constant op for transpose permutation. SmallVector<int32_t, 4> perm = {1, 0}; return Transpose(builder, value_to_transpose, perm, type, location); } Value Reverse(OpBuilder* builder, Value value_to_reverse, int axis, RankedTensorType type, mlir::Location location) { auto axis_op = CreateI32SplatConst(builder, {1}, axis, location); // The result type will be the same as the input. return builder->create<TF::ReverseV2Op>(location, type, value_to_reverse, axis_op); } ArrayRef<int64_t> GetRankedTensorShape(Value value) { return value.getType().cast<RankedTensorType>().getShape(); } Value SliceRankedTensor(OpBuilder* builder, Value input, ArrayRef<int64_t> begin_shape, ArrayRef<int64_t> begin_values, ArrayRef<int64_t> size_shape, ArrayRef<int64_t> size_values, mlir::Location location) { // If the size of the tensor to be sliced from the input overflows // the input tensor's dimensions, return 0-valued tensor of the requested // shape. 
ArrayRef<int64_t> input_shape = GetRankedTensorShape(input); for (int i = 0, end = input_shape.size(); i < end; i++) { if (begin_values[i] < 0 || (begin_values[i] + size_values[i] > input_shape[i])) { return CreateF32SplatConst(builder, size_shape, 0, location); } } // Create a dense constant op for slice's begin auto slice_i2c_begin = CreateI64DenseConst(builder, begin_shape, begin_values, location); // Create a dense constant op for slice's size auto slice_i2c_size = CreateI64DenseConst(builder, size_shape, size_values, location); return builder->create<TF::SliceOp>( location, RankedTensorType::get( size_values, input.getType().cast<RankedTensorType>().getElementType()), input, slice_i2c_begin, slice_i2c_size); } Value CreateStridedSliceOp(mlir::Location loc, ArrayRef<int64_t> output_shape, Value input, ArrayRef<int32_t> begin, ArrayRef<int32_t> end, ArrayRef<int32_t> strides, int64_t begin_mask, int64_t end_mask, int64_t ellipsis_mask, int64_t new_axis_mask, int64_t shrink_axis_mask, OpBuilder* builder) { auto output_type = RankedTensorType::get( output_shape, input.getType().cast<RankedTensorType>().getElementType()); auto begin_tensor = CreateI32DenseConst(builder, begin, loc); auto end_tensor = CreateI32DenseConst(builder, end, loc); auto strides_tensor = CreateI32DenseConst(builder, strides, loc); return builder->create<TF::StridedSliceOp>( loc, output_type, input, begin_tensor, end_tensor, strides_tensor, builder->getI64IntegerAttr(begin_mask), builder->getI64IntegerAttr(end_mask), builder->getI64IntegerAttr(ellipsis_mask), builder->getI64IntegerAttr(new_axis_mask), builder->getI64IntegerAttr(shrink_axis_mask)); } } // namespace void ConvertLSTMCellSimpleToFusedLSTM::SetWeightForInputToCellGate() { SmallVector<int64_t, 2> begin_i2c_values = {0, 0}; input2cell_ = SliceRankedTensor( &builder_, weight_transposed_, weight_slice_shape_, begin_i2c_values, weight_slice_shape_, weight_slice_size_input_values_, fused_func_op_.getLoc()); } void 
ConvertLSTMCellSimpleToFusedLSTM::SetWeightForInputToInputGate() { SmallVector<int64_t, 2> begin_i2i_values = {n_cell_, 0}; input2input_ = couple_input_forget_gates_ ? none_ : SliceRankedTensor(&builder_, weight_transposed_, weight_slice_shape_, begin_i2i_values, weight_slice_shape_, weight_slice_size_input_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetWeightForInputToForgetGate() { int input_forget_start = couple_input_forget_gates_ ? n_cell_ : 2 * n_cell_; SmallVector<int64_t, 2> begin_i2f_values = {input_forget_start, 0}; input2forget_ = SliceRankedTensor( &builder_, weight_transposed_, weight_slice_shape_, begin_i2f_values, weight_slice_shape_, weight_slice_size_input_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetWeightForInputToOutputGate() { int input_output_start = couple_input_forget_gates_ ? 2 * n_cell_ : 3 * n_cell_; SmallVector<int64_t, 2> begin_i2o_values = {input_output_start, 0}; input2output_ = SliceRankedTensor( &builder_, weight_transposed_, weight_slice_shape_, begin_i2o_values, weight_slice_shape_, weight_slice_size_input_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetWeightForRecurrentToCellGate() { SmallVector<int64_t, 2> begin_rec2c_values = {0, n_input_}; rec2cell_ = SliceRankedTensor( &builder_, weight_transposed_, weight_slice_shape_, begin_rec2c_values, weight_slice_shape_, weight_slice_size_recurrent_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetWeightForRecurrentToInputGate() { SmallVector<int64_t, 2> begin_rec2i_values = {n_cell_, n_input_}; rec2input_ = couple_input_forget_gates_ ? none_ : SliceRankedTensor(&builder_, weight_transposed_, weight_slice_shape_, begin_rec2i_values, weight_slice_shape_, weight_slice_size_recurrent_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetWeightForRecurrentToForgetGate() { int rec_forget_start = couple_input_forget_gates_ ? 
n_cell_ : 2 * n_cell_; SmallVector<int64_t, 2> begin_rec2f_values = {rec_forget_start, n_input_}; rec2forget_ = SliceRankedTensor( &builder_, weight_transposed_, weight_slice_shape_, begin_rec2f_values, weight_slice_shape_, weight_slice_size_recurrent_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetWeightForRecurrentToOutputGate() { int rec_output_start = couple_input_forget_gates_ ? 2 * n_cell_ : 3 * n_cell_; SmallVector<int64_t, 2> begin_rec2o_values = {rec_output_start, n_input_}; rec2output_ = SliceRankedTensor( &builder_, weight_transposed_, weight_slice_shape_, begin_rec2o_values, weight_slice_shape_, weight_slice_size_recurrent_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetBiasToCellGate() { SmallVector<int64_t, 1> begin_bias2c_values = {0}; bias2cell_ = SliceRankedTensor(&builder_, bias_, bias_slice_shape_, begin_bias2c_values, bias_slice_shape_, bias_size_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetBiasToInputGate() { SmallVector<int64_t, 1> begin_bias2i_values = {n_cell_}; bias2input_ = couple_input_forget_gates_ ? none_ : SliceRankedTensor(&builder_, bias_, bias_slice_shape_, begin_bias2i_values, bias_slice_shape_, bias_size_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetBiasToForgetGate() { int bias_forget_start = couple_input_forget_gates_ ? n_cell_ : 2 * n_cell_; SmallVector<int64_t, 1> begin_bias2f_values = {bias_forget_start}; bias2forget_ = SliceRankedTensor(&builder_, bias_, bias_slice_shape_, begin_bias2f_values, bias_slice_shape_, bias_size_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetBiasToOutputGate() { int bias_output_start = couple_input_forget_gates_ ? 
2 * n_cell_ : 3 * n_cell_; SmallVector<int64_t, 1> begin_bias2o_values = {bias_output_start}; bias2output_ = SliceRankedTensor(&builder_, bias_, bias_slice_shape_, begin_bias2o_values, bias_slice_shape_, bias_size_values_, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetProjection() { SmallVector<int64_t, 2> projection_slice_shape = { 1, num_cols_projection_transposed_}; SmallVector<int64_t, 2> projection_slice_size_values = {n_output_, n_cell_}; SmallVector<int64_t, 2> projection_slice_begin_values = {0, 0}; proj_weight_ = !projection_ ? none_ : SliceRankedTensor( &builder_, projection_transposed_, projection_slice_shape, projection_slice_begin_values, projection_slice_shape, projection_slice_size_values, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetProjectionBias() { proj_bias_ = !projection_type_ ? none_ : CreateF32SplatConst(&builder_, {n_output_}, 0, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetInputActivationState() { input_activation_state_ = CreateF32SplatConst(&builder_, {1, n_output_}, 0, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetInputCellState() { input_cell_state_ = CreateF32SplatConst(&builder_, {1, n_cell_}, 0, fused_func_op_.getLoc()); } void ConvertLSTMCellSimpleToFusedLSTM::SetCellLayerNormCoefficients() { cell_layer_norm_coefficients_ = none_; } void ConvertLSTMCellSimpleToFusedLSTM::SetInputLayerNormCoefficients() { input_layer_norm_coefficients_ = none_; } void ConvertLSTMCellSimpleToFusedLSTM::SetForgetLayerNormCoefficients() { forget_layer_norm_coefficients_ = none_; } void ConvertLSTMCellSimpleToFusedLSTM::SetOutputLayerNormCoefficients() { output_layer_norm_coefficients_ = none_; } void ConvertLSTMCellSimpleToFusedLSTM::GenerateFusedOpOperands() { // Transpose both weight and projection. 
weight_transposed_ = Transpose2D(&builder_, weight_, weight_type_, fused_func_op_.getLoc()); projection_transposed_ = Transpose2D(&builder_, projection_, projection_type_, fused_func_op_.getLoc()); none_ = CreateNoneValue(&builder_, fused_func_op_.getLoc()); // Extract input to cifg gates via slicing the weight tensor SetWeightForInputToCellGate(); SetWeightForInputToInputGate(); SetWeightForInputToForgetGate(); SetWeightForInputToOutputGate(); // Extract recurrent to cifg gates via slicing the weight tensor SetWeightForRecurrentToCellGate(); SetWeightForRecurrentToInputGate(); SetWeightForRecurrentToForgetGate(); SetWeightForRecurrentToOutputGate(); // Extract bias to cifg gates via slicing the bias tensor SetBiasToCellGate(); SetBiasToInputGate(); SetBiasToForgetGate(); SetBiasToOutputGate(); // Extract projection and set an empty projection bias SetProjection(); SetProjectionBias(); // Set the variable tensors SetInputActivationState(); SetInputCellState(); // Extract the layer norm coefficients SetCellLayerNormCoefficients(); SetInputLayerNormCoefficients(); SetForgetLayerNormCoefficients(); SetOutputLayerNormCoefficients(); } void ConvertLSTMCellSimpleToFusedLSTM::UpdateFuncSignature() { // https://github.com/tensorflow/community/pull/113 SmallVector<int64_t, 2> output_shape{1, -1}; auto input_types = fused_func_op_.getType().getInputs(); auto output_type = mlir::RankedTensorType::get( output_shape, input_.getType().cast<RankedTensorType>().getElementType()); fused_func_op_.setType(mlir::FunctionType::get(input_types, output_type, fused_func_op_.getContext())); } LogicalResult ConvertLSTMCellSimpleToFusedLSTM::RewriteFunc() { LogicalResult result = Initialize(); if (failed(result)) { return result; } // Update the func signature, based on output shape. // The func will ultimately return the output of the fused // LSTM op. 
UpdateFuncSignature(); // Transform the weights, projection, bias and layer norm coefficients // to generate operands for the TFL fused LSTM op. GenerateFusedOpOperands(); // Create the fused LSTM op. SmallVector<int64_t, 2> output_shape = {1, n_output_}; auto result_type = mlir::RankedTensorType::get( output_shape, input_.getType().cast<RankedTensorType>().getElementType()); lstm_ = builder_.create<mlir::TFL::LSTMOp>( fused_func_op_.getLoc(), result_type, input_, input2input_, input2forget_, input2cell_, input2output_, rec2input_, rec2forget_, rec2cell_, rec2output_, /*cell_to_input_weights*/ none_, /*cell_to_forget_weights*/ none_, /*cell_to_output_weights*/ none_, bias2input_, bias2forget_, bias2cell_, bias2output_, proj_weight_, proj_bias_, input_activation_state_, input_cell_state_, input_layer_norm_coefficients_, forget_layer_norm_coefficients_, cell_layer_norm_coefficients_, output_layer_norm_coefficients_, builder_.getStringAttr("TANH"), builder_.getF32FloatAttr(10.0), builder_.getF32FloatAttr(0.0), builder_.getStringAttr("FULL"), /*input_to_input_intermediate=*/mlir::TypeAttr(), /*input_to_forget_intermediate=*/mlir::TypeAttr(), /*input_to_cell_intermediate=*/mlir::TypeAttr(), /*input_to_output_intermediate=*/mlir::TypeAttr(), /*effective_hidden_scale_intermediate=*/mlir::TypeAttr()); // Cast the static shaped lstm result to FuncOp's signature - // Ranked but unknown 2nd dimension to support stacking these. 
SmallVector<int64_t, 2> func_output_shape = {1, -1}; auto func_result_type = mlir::RankedTensorType::get( func_output_shape, input_.getType().cast<RankedTensorType>().getElementType()); auto tensor_cast = builder_.create<mlir::TensorCastOp>( fused_func_op_.getLoc(), lstm_.getResult(), func_result_type); builder_.create<mlir::ReturnOp>(fused_func_op_.getLoc(), tensor_cast.getResult()); return success(); } LogicalResult ConvertLSTMCellSimpleToFusedLSTM::InitializeFromFuncAttributes() { auto attr = fused_func_op_.getAttrOfType<StringAttr>(kTFImplements); if (!attr) { return fused_func_op_.emitError() << "Invalid function attribute, expected " << kTFImplements << " attribute " "not found"; } // TODO(ashwinm, b/144775479): Make these NamedAttribute on TF import // once tf.function can support this. llvm::SmallVector<llvm::StringRef, 4> attr_tokens; attr.getValue().split(attr_tokens, ","); if (attr_tokens.empty()) { return fused_func_op_.emitError() << kTFImplements << " attribute should be set"; } // Check if the interface matches. if (GetCompositeOpName().str() != attr_tokens[0]) { return fused_func_op_.emitError() << "Unexpected interface for the composite op. Expected: " << GetCompositeOpName() << " Actual: " << attr_tokens[0]; } // Extract other interface attributes, for now cifg. couple_input_forget_gates_ = std::find(attr_tokens.begin() + 1, attr_tokens.end(), kCoupleInputForgetGates) != attr_tokens.end(); return success(); } LogicalResult ConvertLSTMCellSimpleToFusedLSTM::Initialize() { if (failed(InitializeFromFuncAttributes())) { return fused_func_op_.emitError() << "Expected function attributes were not set on the function " "encapsulating the composite op"; } num_gates_ = couple_input_forget_gates_ ? 
3 : 4; input_ = fused_func_op_.getArgument(0); bias_ = fused_func_op_.getArgument(2); weight_ = fused_func_op_.getArgument(1); weight_type_ = weight_.getType().cast<RankedTensorType>(); if (weight_type_.getRank() != 2) { return fused_func_op_.emitError() << "The weight tensor was not of rank 2"; } if (weight_type_.getDimSize(1) % num_gates_ != 0) { return fused_func_op_.emitError() << "Invalid dimension 1 of weight tensor, " "should be divisible by the number of gates"; } n_cell_ = weight_type_.getDimSize(1) / num_gates_; projection_ = fused_func_op_.getArgument(3); projection_type_ = projection_.getType().cast<RankedTensorType>(); if (projection_type_.getRank() != 2) { n_output_ = n_cell_; } else { n_output_ = projection_type_.getDimSize(1); } n_input_ = weight_type_.getDimSize(0) - n_output_; num_cols_weight_transposed_ = weight_type_.getDimSize(0); num_cols_projection_transposed_ = projection_type_.getDimSize(0); bias_slice_shape_ = {n_cell_}; bias_size_values_ = {n_cell_}; weight_slice_shape_ = {1, num_cols_weight_transposed_}; weight_slice_size_input_values_ = {n_cell_, n_input_}; weight_slice_size_recurrent_values_ = {n_cell_, n_output_}; return success(); } LogicalResult ConvertLayerNormalizedLSTMCellSimpleToFusedLSTM::Initialize() { if (failed(ConvertLSTMCellSimpleToFusedLSTM::Initialize())) { return fused_func_op_.emitError() << "Specified LayerNormalizedLSTMCellSimple was not of the expected " "interface and cannot not be converted to the fused LSTM op"; } layer_norm_scale_ = fused_func_op_.getArgument(4); layer_norm_scale_type_ = layer_norm_scale_.getType().cast<RankedTensorType>(); if (layer_norm_scale_type_.getRank() != 1) { return fused_func_op_.emitError() << "The layer_norm_scale tensor was not of rank 1"; } layer_norm_slice_shape_ = {n_cell_}; layer_norm_size_values_ = {n_cell_}; return success(); } void ConvertLayerNormalizedLSTMCellSimpleToFusedLSTM:: SetCellLayerNormCoefficients() { SmallVector<int64_t, 1> begin_cell_layer_norm_values = {0}; 
cell_layer_norm_coefficients_ = SliceRankedTensor(&builder_, layer_norm_scale_, layer_norm_slice_shape_, begin_cell_layer_norm_values, layer_norm_slice_shape_, layer_norm_size_values_, fused_func_op_.getLoc()); } void ConvertLayerNormalizedLSTMCellSimpleToFusedLSTM:: SetInputLayerNormCoefficients() { SmallVector<int64_t, 1> begin_input_layer_norm_values = {n_cell_}; input_layer_norm_coefficients_ = couple_input_forget_gates_ ? none_ : SliceRankedTensor( &builder_, layer_norm_scale_, layer_norm_slice_shape_, begin_input_layer_norm_values, layer_norm_slice_shape_, layer_norm_size_values_, fused_func_op_.getLoc()); } void ConvertLayerNormalizedLSTMCellSimpleToFusedLSTM:: SetForgetLayerNormCoefficients() { SmallVector<int64_t, 1> begin_forget_layer_norm_values = {2 * n_cell_}; forget_layer_norm_coefficients_ = SliceRankedTensor(&builder_, layer_norm_scale_, layer_norm_slice_shape_, begin_forget_layer_norm_values, layer_norm_slice_shape_, layer_norm_size_values_, fused_func_op_.getLoc()); } void ConvertLayerNormalizedLSTMCellSimpleToFusedLSTM:: SetOutputLayerNormCoefficients() { SmallVector<int64_t, 1> begin_output_layer_norm_values = {3 * n_cell_}; output_layer_norm_coefficients_ = SliceRankedTensor(&builder_, layer_norm_scale_, layer_norm_slice_shape_, begin_output_layer_norm_values, layer_norm_slice_shape_, layer_norm_size_values_, fused_func_op_.getLoc()); } TF::ConstOp Create1DConstantOp(const std::vector<int>& value, Location loc, OpBuilder* builder) { auto type = mlir::RankedTensorType::get(value.size(), builder->getIntegerType(32)); auto dense_values = mlir::DenseIntElementsAttr::get(type, value); return builder->create<TF::ConstOp>(loc, dense_values); } TF::ConstOp CreateScalarConstantOp(int value, Location loc, OpBuilder* builder) { return builder->create<TF::ConstOp>(loc, builder->getI32IntegerAttr(value)); } LogicalResult CreateEqualSizeSplitVOp(Value input, int axis, int splits, Location loc, OpBuilder* builder, Operation** result) { auto input_type = 
input.getType().cast<RankedTensorType>(); SmallVector<int64_t, 4> output_shape; int size_of_splits; if (input_type.getRank() < axis || axis < 0) return failure(); for (int i = 0; i < input_type.getRank(); ++i) { int dim = input_type.getDimSize(i); if (i == axis) { if (dim % splits != 0) { return failure(); } size_of_splits = dim / splits; output_shape.push_back(size_of_splits); } else { output_shape.push_back(dim); } } SmallVector<mlir::Type, 4> output_types; for (int i = 0; i < splits; ++i) { output_types.push_back( mlir::RankedTensorType::get(output_shape, input_type.getElementType())); } auto size_of_splits_op = Create1DConstantOp( {size_of_splits, size_of_splits, size_of_splits, size_of_splits}, loc, builder); auto axis_op = CreateScalarConstantOp(axis, loc, builder); *result = builder->create<TF::SplitVOp>(loc, output_types, input, size_of_splits_op.getResult(), axis_op.getResult()); return success(); } // TODO(b/147436982): Consider refactor this to be more general. LogicalResult ConvertKerasLSTMLayer(mlir::FuncOp func_op, OpBuilder* builder) { // For argument order, please check out standard_lstm under // tensorflow/python/keras/layers/recurrent_v2.py Value input = func_op.getArgument(0); Value output_init_state = func_op.getArgument(1); Value hidden_init_state = func_op.getArgument(2); Value weight_kernel = func_op.getArgument(3); Value recurrent_kernel = func_op.getArgument(4); Value bias = func_op.getArgument(5); // The func op should have 5 outputs. if (func_op.getNumResults() != 5) return failure(); // TFL lstm only supports time-majored inputs, so if it's not time-majored, // we will transpose the inputs and outputs. 
auto time_major_attr = func_op.getAttrOfType<BoolAttr>("tf.time_major"); if (time_major_attr == nullptr) return failure(); bool time_majored = time_major_attr.getValue(); auto input_type = input.getType().dyn_cast_or_null<RankedTensorType>(); if (!input_type) { func_op.emitError() << "Input type is not a ranked tensor type"; return failure(); } auto final_inputs = input; auto final_input_type = input_type; // Handle go_backwards: // LSTM in Keras semantic will reverse the input sequence if it's go_backwards auto go_backwards_attr = func_op.getAttrOfType<BoolAttr>("tf.go_backwards"); if (go_backwards_attr != nullptr && go_backwards_attr.getValue()) { int time_dim = time_majored ? 0 : 1; final_inputs = Reverse(builder, final_inputs, time_dim, final_input_type, func_op.getLoc()); } int batch = time_majored ? final_input_type.getDimSize(1) : final_input_type.getDimSize(0); int time = time_majored ? final_input_type.getDimSize(0) : final_input_type.getDimSize(1); // Setup correct weights. RankedTensorType weight_type = weight_kernel.getType().cast<RankedTensorType>(); if (weight_type.getRank() != 2) return func_op.emitError() << "The weight should be rank of 2"; Value transposed_weight_kernel = Transpose2D(builder, weight_kernel, weight_type, func_op.getLoc()); RankedTensorType recurrent_kernel_type = recurrent_kernel.getType().cast<RankedTensorType>(); const int n_output = recurrent_kernel_type.getDimSize(0); Value transpose_recurrent_kernel = Transpose2D( builder, recurrent_kernel, recurrent_kernel_type, func_op.getLoc()); // Splits the weights into 4: i, f, c, o. 
const int splits = 4; Operation* weights_array; if (failed(CreateEqualSizeSplitVOp(transposed_weight_kernel, 0, splits, func_op.getLoc(), builder, &weights_array))) return failure(); // Splits the recurrent_weights into 4: Operation* recurrent_weights_array; if (failed(CreateEqualSizeSplitVOp(transpose_recurrent_kernel, 0, splits, func_op.getLoc(), builder, &recurrent_weights_array))) return failure(); // Splits the bias into 4: Operation* bias_array; if (failed(CreateEqualSizeSplitVOp(bias, 0, splits, func_op.getLoc(), builder, &bias_array))) return failure(); // Build the lstm op. SmallVector<int64_t, 3> output_shape; if (time_majored) { output_shape = {time, batch, n_output}; } else { output_shape = {batch, time, n_output}; } auto result_type = mlir::RankedTensorType::get( output_shape, final_inputs.getType().cast<RankedTensorType>().getElementType()); Value none = builder->create<mlir::ConstantOp>( func_op.getLoc(), builder->getNoneType(), builder->getUnitAttr()); auto lstm = builder->create<mlir::TFL::UnidirectionalSequenceLSTMOp>( func_op.getLoc(), result_type, /*input=*/final_inputs, /*input_to_input_weights=*/weights_array->getResult(0), /*input_to_forget_weights=*/weights_array->getResult(1), /*input_to_cell_weights=*/weights_array->getResult(2), /*input_to_output_weights=*/weights_array->getResult(3), /*recurrent_to_input_weights=*/recurrent_weights_array->getResult(0), /*recurrent_to_forget_weights=*/recurrent_weights_array->getResult(1), /*recurrent_to_cell_weights=*/recurrent_weights_array->getResult(2), /*recurrent_to_output_weights=*/recurrent_weights_array->getResult(3), /*cell_to_input_weights=*/none, /*cell_to_forget_weights=*/none, /*cell_to_output_weights=*/none, /*input_gate_bias=*/bias_array->getResult(0), /*forget_gate_bias=*/bias_array->getResult(1), /*cell_bias=*/bias_array->getResult(2), /*output_gate_bias=*/bias_array->getResult(3), /*projection_weights=*/none, /*projection_bias=*/none, /*input_activation_state=*/output_init_state, 
/*input_cell_state=*/hidden_init_state, /*input_layer_norm_coefficients=*/none, /*forget_layer_norm_coefficients=*/none, /*cell_layer_norm_coefficients=*/none, /*output_layer_norm_coefficients=*/none, builder->getStringAttr("TANH"), builder->getF32FloatAttr(10.0), builder->getF32FloatAttr(0.0), builder->getBoolAttr(time_majored)); auto final_output_full_sequences = lstm.getResult(); // Populate the last output: last output is sliced from the full sequences. // If time_major: last_output = outputs[-1, :, :] // else: last_output = outputs[:, -1, :] // // As we are creating the strided_slice op, we need to populate the following // fields: // end: should always be (0, 0, 0) // strides: should always be (1, 1, 1) // begin: should be (0, -1, 0) or (-1, 0, 0) if it's time-majored. // new_axis_mask: should always be 0. // ellipsis_mask: should always be 0. // begin_mask & end_mask: should be 0b101 = 5 or 0b110 = 4 if it's // time-majored. shrink_axis_mask: should be 0b010 = 2 or 0b001 = 1 if it's // time-majored. SmallVector<int64_t, 2> last_output_shape({batch, n_output}); SmallVector<int32_t, 3> end({0, 0, 0}); SmallVector<int32_t, 3> strides({1, 1, 1}); SmallVector<int32_t, 3> begin; int64_t new_axis_mask = 0; int64_t ellipsis_mask = 0; int64_t begin_mask; int64_t end_mask; int64_t shrink_axis_mask; if (time_majored) { begin_mask = 6; end_mask = 6; shrink_axis_mask = 1; begin = {-1, 0, 0}; } else { begin_mask = 5; end_mask = 5; shrink_axis_mask = 2; begin = {0, -1, 0}; } auto last_output = CreateStridedSliceOp( func_op.getLoc(), last_output_shape, final_output_full_sequences, begin, end, strides, begin_mask, end_mask, ellipsis_mask, new_axis_mask, shrink_axis_mask, builder); SmallVector<Value, 5> outputs; SmallVector<Type, 5> output_types; // Due to the existence of the while loop, the timestamp may be unknown // for the signature, for us, since we know the inputs, we can infer the time // steps. // Last output. 
outputs.push_back(last_output); output_types.push_back(last_output.getType()); // Full sequences. outputs.push_back(final_output_full_sequences); output_types.push_back(final_output_full_sequences.getType()); // All the rest: states, device. for (int i = 2; i < 5; ++i) { auto result_type = func_op.getCallableResults()[i].dyn_cast<RankedTensorType>(); outputs.push_back(CreatTfF32ConstOp(builder, result_type.getShape(), 0.0f, func_op.getLoc())); output_types.push_back(result_type); } // Update function signatures. func_op.setType(mlir::FunctionType::get(func_op.getType().getInputs(), output_types, func_op.getContext())); builder->create<mlir::ReturnOp>(func_op.getLoc(), outputs); return success(); } } // namespace TFL } // namespace mlir
apache-2.0
weolar/miniblink49
v8_4_8/src/runtime/runtime-internal.cc
13940
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Internal runtime functions: bootstrapper exports, throw/rethrow helpers,
// error-object construction, promise-reject reporting, raw allocation, and
// stack-trace / message accessors invoked from generated code and natives.

#include "src/runtime/runtime-utils.h"

#include "src/arguments.h"
#include "src/bootstrapper.h"
#include "src/conversions.h"
#include "src/debug/debug.h"
#include "src/frames-inl.h"
#include "src/isolate-inl.h"
#include "src/messages.h"
#include "src/parser.h"
#include "src/prettyprinter.h"

namespace v8 {
namespace internal {

// Fails (RUNTIME_ASSERT) unless the bootstrapper is currently active;
// otherwise returns undefined. Guards natives-only runtime calls.
RUNTIME_FUNCTION(Runtime_CheckIsBootstrapping) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 0);
  RUNTIME_ASSERT(isolate->bootstrapper()->IsActive());
  return isolate->heap()->undefined_value();
}

// Normalizes |container| to dictionary mode, lets the bootstrapper export
// runtime entries into it, then migrates it back to fast properties.
// Only legal while bootstrapping.
RUNTIME_FUNCTION(Runtime_ExportFromRuntime) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSObject, container, 0);
  RUNTIME_ASSERT(isolate->bootstrapper()->IsActive());
  JSObject::NormalizeProperties(container, KEEP_INOBJECT_PROPERTIES, 10,
                                "ExportFromRuntime");
  Bootstrapper::ExportFromRuntime(isolate, container);
  JSObject::MigrateSlowToFast(container, 0, "ExportFromRuntime");
  return *container;
}

// Same as Runtime_ExportFromRuntime, but exports the experimental
// (flag-guarded) runtime entries instead.
RUNTIME_FUNCTION(Runtime_ExportExperimentalFromRuntime) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSObject, container, 0);
  RUNTIME_ASSERT(isolate->bootstrapper()->IsActive());
  JSObject::NormalizeProperties(container, KEEP_INOBJECT_PROPERTIES, 10,
                                "ExportExperimentalFromRuntime");
  Bootstrapper::ExportExperimentalFromRuntime(isolate, container);
  JSObject::MigrateSlowToFast(container, 0, "ExportExperimentalFromRuntime");
  return *container;
}

// Installs flat (name, object) pairs from |array| into the native context.
// Each slot index is resolved first as an imported field name and, failing
// that, as an intrinsic name. Only legal while bootstrapping.
RUNTIME_FUNCTION(Runtime_InstallToContext) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSArray, array, 0);
  RUNTIME_ASSERT(array->HasFastElements());
  RUNTIME_ASSERT(isolate->bootstrapper()->IsActive());
  Handle<Context> native_context = isolate->native_context();
  Handle<FixedArray> fixed_array(FixedArray::cast(array->elements()));
  int length = Smi::cast(array->length())->value();
  // Pairs are laid out as [name0, object0, name1, object1, ...].
  for (int i = 0; i < length; i += 2) {
    RUNTIME_ASSERT(fixed_array->get(i)->IsString());
    Handle<String> name(String::cast(fixed_array->get(i)));
    RUNTIME_ASSERT(fixed_array->get(i + 1)->IsJSObject());
    Handle<JSObject> object(JSObject::cast(fixed_array->get(i + 1)));
    int index = Context::ImportedFieldIndexForName(name);
    if (index == Context::kNotFound) {
      index = Context::IntrinsicIndexForName(name);
    }
    RUNTIME_ASSERT(index != Context::kNotFound);
    native_context->set(index, *object);
  }
  return isolate->heap()->undefined_value();
}

// Throws the given value as an exception.
RUNTIME_FUNCTION(Runtime_Throw) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  return isolate->Throw(args[0]);
}

// Re-throws the given value (used to propagate a caught exception).
RUNTIME_FUNCTION(Runtime_ReThrow) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  return isolate->ReThrow(args[0]);
}

// Raises a stack-overflow exception on the isolate.
RUNTIME_FUNCTION(Runtime_ThrowStackOverflow) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->StackOverflow();
}

// Unwinds the stack until a handler for the pending exception is found.
RUNTIME_FUNCTION(Runtime_UnwindAndFindExceptionHandler) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 0);
  return isolate->UnwindAndFindHandler();
}

// Promotes the scheduled exception to a pending exception.
RUNTIME_FUNCTION(Runtime_PromoteScheduledException) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 0);
  return isolate->PromoteScheduledException();
}

// Throws a "<name> is not defined" ReferenceError.
RUNTIME_FUNCTION(Runtime_ThrowReferenceError) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(Object, name, 0);
  THROW_NEW_ERROR_RETURN_FAILURE(
      isolate, NewReferenceError(MessageTemplate::kNotDefined, name));
}

// Constructs (but does not throw) a TypeError from a message-template index
// and a single argument.
RUNTIME_FUNCTION(Runtime_NewTypeError) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_INT32_ARG_CHECKED(template_index, 0);
  CONVERT_ARG_HANDLE_CHECKED(Object, arg0, 1);
  auto message_template =
      static_cast<MessageTemplate::Template>(template_index);
  return *isolate->factory()->NewTypeError(message_template, arg0);
}

// Constructs (but does not throw) a ReferenceError from a message-template
// index and a single argument.
RUNTIME_FUNCTION(Runtime_NewReferenceError) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_INT32_ARG_CHECKED(template_index, 0);
  CONVERT_ARG_HANDLE_CHECKED(Object, arg0, 1);
  auto message_template =
      static_cast<MessageTemplate::Template>(template_index);
  return *isolate->factory()->NewReferenceError(message_template, arg0);
}

// Constructs (but does not throw) a SyntaxError from a message-template
// index and a single argument.
RUNTIME_FUNCTION(Runtime_NewSyntaxError) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_INT32_ARG_CHECKED(template_index, 0);
  CONVERT_ARG_HANDLE_CHECKED(Object, arg0, 1);
  auto message_template =
      static_cast<MessageTemplate::Template>(template_index);
  return *isolate->factory()->NewSyntaxError(message_template, arg0);
}

// Throws a TypeError for an iterator result that is not an object.
RUNTIME_FUNCTION(Runtime_ThrowIteratorResultNotAnObject) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(Object, value, 0);
  THROW_NEW_ERROR_RETURN_FAILURE(
      isolate,
      NewTypeError(MessageTemplate::kIteratorResultNotAnObject, value));
}

// Throws the strong-mode implicit-conversion TypeError.
RUNTIME_FUNCTION(Runtime_ThrowStrongModeImplicitConversion) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 0);
  THROW_NEW_ERROR_RETURN_FAILURE(
      isolate, NewTypeError(MessageTemplate::kStrongImplicitConversion));
}

// Reports a promise rejection to the embedder (and optionally the debugger)
// unless the promise already has a handler installed.
RUNTIME_FUNCTION(Runtime_PromiseRejectEvent) {
  DCHECK(args.length() == 3);
  HandleScope scope(isolate);
  CONVERT_ARG_HANDLE_CHECKED(JSObject, promise, 0);
  CONVERT_ARG_HANDLE_CHECKED(Object, value, 1);
  CONVERT_BOOLEAN_ARG_CHECKED(debug_event, 2);
  if (debug_event) isolate->debug()->OnPromiseReject(promise, value);
  Handle<Symbol> key = isolate->factory()->promise_has_handler_symbol();
  // Do not report if we actually have a handler.
  if (JSReceiver::GetDataProperty(promise, key)->IsUndefined()) {
    isolate->ReportPromiseReject(promise, value,
                                 v8::kPromiseRejectWithNoHandler);
  }
  return isolate->heap()->undefined_value();
}

// Reports that a handler was added to an already-rejected promise,
// revoking the earlier no-handler rejection report.
RUNTIME_FUNCTION(Runtime_PromiseRevokeReject) {
  DCHECK(args.length() == 1);
  HandleScope scope(isolate);
  CONVERT_ARG_HANDLE_CHECKED(JSObject, promise, 0);
  Handle<Symbol> key = isolate->factory()->promise_has_handler_symbol();
  // At this point, no revocation has been issued before
  RUNTIME_ASSERT(JSReceiver::GetDataProperty(promise, key)->IsUndefined());
  isolate->ReportPromiseReject(promise, Handle<Object>(),
                               v8::kPromiseHandlerAddedAfterReject);
  return isolate->heap()->undefined_value();
}

// Stack-check slow path: raises StackOverflow on a real overflow, otherwise
// services any pending interrupts.
RUNTIME_FUNCTION(Runtime_StackGuard) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 0);
  // First check if this is a real stack overflow.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed()) {
    return isolate->StackOverflow();
  }
  return isolate->stack_guard()->HandleInterrupts();
}

// Services pending interrupts without the overflow check.
RUNTIME_FUNCTION(Runtime_Interrupt) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 0);
  return isolate->stack_guard()->HandleInterrupts();
}

// Allocates a filler object of |size| bytes in new space. Size must be
// positive, pointer-aligned, and within the regular-object limit.
RUNTIME_FUNCTION(Runtime_AllocateInNewSpace) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(size, 0);
  RUNTIME_ASSERT(IsAligned(size, kPointerSize));
  RUNTIME_ASSERT(size > 0);
  RUNTIME_ASSERT(size <= Page::kMaxRegularHeapObjectSize);
  return *isolate->factory()->NewFillerObject(size, false, NEW_SPACE);
}

// Allocates a filler object of |size| bytes in the space encoded in |flags|,
// honoring the double-alignment flag. Same size constraints as above.
RUNTIME_FUNCTION(Runtime_AllocateInTargetSpace) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_SMI_ARG_CHECKED(size, 0);
  CONVERT_SMI_ARG_CHECKED(flags, 1);
  RUNTIME_ASSERT(IsAligned(size, kPointerSize));
  RUNTIME_ASSERT(size > 0);
  RUNTIME_ASSERT(size <= Page::kMaxRegularHeapObjectSize);
  bool double_align = AllocateDoubleAlignFlag::decode(flags);
  AllocationSpace space = AllocateTargetSpace::decode(flags);
  return *isolate->factory()->NewFillerObject(size, double_align, space);
}

// Collect the raw data for a stack trace. Returns an array of 4
// element segments each containing a receiver, function, code and
// native code offset.
RUNTIME_FUNCTION(Runtime_CollectStackTrace) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_ARG_HANDLE_CHECKED(JSObject, error_object, 0);
  CONVERT_ARG_HANDLE_CHECKED(Object, caller, 1);
  // Traces are only captured once bootstrapping has finished.
  if (!isolate->bootstrapper()->IsActive()) {
    // Optionally capture a more detailed stack trace for the message.
    RETURN_FAILURE_ON_EXCEPTION(
        isolate, isolate->CaptureAndSetDetailedStackTrace(error_object));
    // Capture a simple stack trace for the stack property.
    RETURN_FAILURE_ON_EXCEPTION(
        isolate, isolate->CaptureAndSetSimpleStackTrace(error_object, caller));
  }
  return isolate->heap()->undefined_value();
}

// Returns the start position of a JSMessageObject as a Smi.
RUNTIME_FUNCTION(Runtime_MessageGetStartPosition) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
  return Smi::FromInt(message->start_position());
}

// Returns the script associated with a JSMessageObject.
RUNTIME_FUNCTION(Runtime_MessageGetScript) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
  return message->script();
}

// Stringifies an error object via the isolate's error-tostring helper.
RUNTIME_FUNCTION(Runtime_ErrorToStringRT) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSObject, error, 0);
  Handle<String> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, result,
      isolate->error_tostring_helper()->Stringify(isolate, error));
  return *result;
}

// Formats a message template with three string arguments and bumps the
// native context's errors-thrown counter.
RUNTIME_FUNCTION(Runtime_FormatMessageString) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 4);
  CONVERT_INT32_ARG_CHECKED(template_index, 0);
  CONVERT_ARG_HANDLE_CHECKED(String, arg0, 1);
  CONVERT_ARG_HANDLE_CHECKED(String, arg1, 2);
  CONVERT_ARG_HANDLE_CHECKED(String, arg2, 3);
  Handle<String> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, result,
      MessageTemplate::FormatMessage(template_index, arg0, arg1, arg2));
  isolate->native_context()->IncrementErrorsThrown();
  return *result;
}

// Expands to Runtime_CallSite<NAME>RT: validates a JSObject-backed CallSite
// and returns call_site.NAME() converted through the RETURN adapter below.
// NOTE(review): the local |result| appears unused in this macro body.
#define CALLSITE_GET(NAME, RETURN)                          \
  RUNTIME_FUNCTION(Runtime_CallSite##NAME##RT) {            \
    HandleScope scope(isolate);                             \
    DCHECK(args.length() == 1);                             \
    CONVERT_ARG_HANDLE_CHECKED(JSObject, call_site_obj, 0); \
    Handle<String> result;                                  \
    CallSite call_site(isolate, call_site_obj);             \
    RUNTIME_ASSERT(call_site.IsValid())                     \
    return RETURN(call_site.NAME(), isolate);               \
  }

// Adapter: returns the handle's underlying object unchanged.
static inline Object* ReturnDereferencedHandle(Handle<Object> obj,
                                               Isolate* isolate) {
  return *obj;
}

// Adapter: non-negative ints become Smis; negatives map to null.
static inline Object* ReturnPositiveSmiOrNull(int value, Isolate* isolate) {
  if (value >= 0) return Smi::FromInt(value);
  return isolate->heap()->null_value();
}

// Adapter: converts a C++ bool to the heap's boolean value.
static inline Object* ReturnBoolean(bool value, Isolate* isolate) {
  return isolate->heap()->ToBoolean(value);
}

CALLSITE_GET(GetFileName, ReturnDereferencedHandle)
CALLSITE_GET(GetFunctionName, ReturnDereferencedHandle)
CALLSITE_GET(GetScriptNameOrSourceUrl, ReturnDereferencedHandle)
CALLSITE_GET(GetMethodName, ReturnDereferencedHandle)
CALLSITE_GET(GetLineNumber, ReturnPositiveSmiOrNull)
CALLSITE_GET(GetColumnNumber, ReturnPositiveSmiOrNull)
CALLSITE_GET(IsNative, ReturnBoolean)
CALLSITE_GET(IsToplevel, ReturnBoolean)
CALLSITE_GET(IsEval, ReturnBoolean)
CALLSITE_GET(IsConstructor, ReturnBoolean)

#undef CALLSITE_GET

RUNTIME_FUNCTION(Runtime_IS_VAR) {
  UNREACHABLE();  // implemented as macro in the parser
  return NULL;
}

// Increments the named stats counter when native code counters are enabled.
RUNTIME_FUNCTION(Runtime_IncrementStatsCounter) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_CHECKED(String, name, 0);
  if (FLAG_native_code_counters) {
    StatsCounter(isolate, name->ToCString().get()).Increment();
  }
  return isolate->heap()->undefined_value();
}

// Reflects the --harmony-tostring flag as a boolean.
RUNTIME_FUNCTION(Runtime_HarmonyToString) {
  // TODO(caitp): Delete this runtime method when removing --harmony-tostring
  return isolate->heap()->ToBoolean(FLAG_harmony_tostring);
}

// Returns the type-feedback vector of the given function's shared info.
RUNTIME_FUNCTION(Runtime_GetTypeFeedbackVector) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_CHECKED(JSFunction, function, 0);
  return function->shared()->feedback_vector();
}
// Walks the stack: asserts the top frame is a STUB frame and returns the
// JSFunction of the JavaScript frame immediately below it.
RUNTIME_FUNCTION(Runtime_GetCallerJSFunction) {
  SealHandleScope shs(isolate);
  StackFrameIterator it(isolate);
  RUNTIME_ASSERT(it.frame()->type() == StackFrame::STUB);
  it.Advance();
  RUNTIME_ASSERT(it.frame()->type() == StackFrame::JAVA_SCRIPT);
  return JavaScriptFrame::cast(it.frame())->function();
}

// Returns the heap's code-stub exports object.
RUNTIME_FUNCTION(Runtime_GetCodeStubExportsObject) {
  HandleScope shs(isolate);
  return isolate->heap()->code_stub_exports_object();
}

namespace {

// Renders a human-readable description of the current call site by
// re-parsing the enclosing function (or script) and printing the call
// expression at the current source position. Falls back to the typeof
// string of |object| when the location cannot be computed or parsing fails.
Handle<String> RenderCallSite(Isolate* isolate, Handle<Object> object) {
  MessageLocation location;
  if (isolate->ComputeLocation(&location)) {
    Zone zone;
    base::SmartPointer<ParseInfo> info(
        location.function()->shared()->is_function()
            ? new ParseInfo(&zone, location.function())
            : new ParseInfo(&zone, location.script()));
    if (Parser::ParseStatic(info.get())) {
      CallPrinter printer(isolate);
      const char* string =
          printer.Print(info->literal(), location.start_pos());
      return isolate->factory()->NewStringFromAsciiChecked(string);
    } else {
      // Parsing failed; drop the parse error so only the TypeError below
      // is reported.
      isolate->clear_pending_exception();
    }
  }
  return Object::TypeOf(isolate, object);
}

}  // namespace

// Throws "<callsite> is not a function" when a non-callable is invoked.
RUNTIME_FUNCTION(Runtime_ThrowCalledNonCallable) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(Object, object, 0);
  Handle<String> callsite = RenderCallSite(isolate, object);
  THROW_NEW_ERROR_RETURN_FAILURE(
      isolate, NewTypeError(MessageTemplate::kCalledNonCallable, callsite));
}

}  // namespace internal
}  // namespace v8
apache-2.0
tufangorel/hazelcast
hazelcast/src/main/java/com/hazelcast/client/impl/protocol/task/cache/CacheClearMessageTask.java
3397
/* * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.client.impl.protocol.task.cache; import com.hazelcast.cache.impl.CacheClearResponse; import com.hazelcast.cache.impl.CacheOperationProvider; import com.hazelcast.cache.impl.CacheService; import com.hazelcast.cache.impl.operation.CacheClearOperationFactory; import com.hazelcast.client.impl.protocol.ClientMessage; import com.hazelcast.client.impl.protocol.codec.CacheClearCodec; import com.hazelcast.instance.Node; import com.hazelcast.nio.Connection; import com.hazelcast.security.permission.ActionConstants; import com.hazelcast.security.permission.CachePermission; import com.hazelcast.spi.OperationFactory; import javax.cache.CacheException; import java.security.Permission; import java.util.Map; /** * This client request specifically calls {@link CacheClearOperationFactory} on the server side. 
* * @see CacheClearOperationFactory */ public class CacheClearMessageTask extends AbstractCacheAllPartitionsTask<CacheClearCodec.RequestParameters> { public CacheClearMessageTask(ClientMessage clientMessage, Node node, Connection connection) { super(clientMessage, node, connection); } @Override protected CacheClearCodec.RequestParameters decodeClientMessage(ClientMessage clientMessage) { return CacheClearCodec.decodeRequest(clientMessage); } @Override protected ClientMessage encodeResponse(Object response) { return CacheClearCodec.encodeResponse(); } @Override protected OperationFactory createOperationFactory() { CacheOperationProvider operationProvider = getOperationProvider(parameters.name); return operationProvider.createClearOperationFactory(); } @Override protected Object reduce(Map<Integer, Object> map) { for (Map.Entry<Integer, Object> entry : map.entrySet()) { if (entry.getValue() == null) { continue; } final CacheClearResponse cacheClearResponse = (CacheClearResponse) nodeEngine.toObject(entry.getValue()); final Object response = cacheClearResponse.getResponse(); if (response instanceof CacheException) { throw (CacheException) response; } } return null; } @Override public Permission getRequiredPermission() { return new CachePermission(parameters.name, ActionConstants.ACTION_REMOVE); } @Override public String getServiceName() { return CacheService.SERVICE_NAME; } @Override public String getDistributedObjectName() { return parameters.name; } @Override public Object[] getParameters() { return null; } @Override public String getMethodName() { return "clear"; } }
apache-2.0
ericmckean/collide
java/com/google/collide/client/workspace/FileTreeModel.java
23794
// Copyright 2012 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.collide.client.workspace; import com.google.collide.client.bootstrap.BootstrapSession; import com.google.collide.client.ui.tree.TreeNodeElement; import com.google.collide.client.util.PathUtil; import com.google.collide.client.util.logging.Log; import com.google.collide.client.workspace.FileTreeModelNetworkController.OutgoingController; import com.google.collide.dto.DirInfo; import com.google.collide.dto.Mutation; import com.google.collide.dto.ServerError.FailureReason; import com.google.collide.dto.WorkspaceTreeUpdate; import com.google.collide.dto.client.DtoClientImpls.DirInfoImpl; import com.google.collide.dto.client.DtoClientImpls.WorkspaceTreeUpdateImpl; import com.google.collide.json.client.JsoArray; import com.google.collide.json.shared.JsonArray; import com.google.collide.shared.util.JsonCollections; import com.google.collide.shared.util.StringUtils; import com.google.common.base.Preconditions; import javax.annotation.Nullable; /** * Public API for interacting with the client side workspace file tree model. * Also exposes callbacks for mutations that have been applied to the model. * * If you want to mutate the workspace file tree, which is a tree of * {@link FileTreeNode}'s you need to go through here. */ public class FileTreeModel { /** * Callback interface for requesting the root node, potentially * asynchronously. 
*/ public interface RootNodeRequestCallback { void onRootNodeAvailable(FileTreeNode root); } /** * Callback interface for requesting a node, potentially asynchronously. */ public interface NodeRequestCallback { void onNodeAvailable(FileTreeNode node); /** * Called if the node does not exist. */ void onNodeUnavailable(); /** * Called if an error occurs while loading the node. */ void onError(FailureReason reason); } /** * Callback interface for getting notified about changes to the workspace tree * model that have been applied by the FileTreeController. */ public interface TreeModelChangeListener { /** * Notification that a node was added. */ void onNodeAdded(PathUtil parentDirPath, FileTreeNode newNode); /** * Notification that a node was moved/renamed. * * @param oldPath the old node path * @param node the node that was moved, or null if the old path is not loaded. If both the old * path and the new path are loaded, node == newNode and node's parent will be the target * directory of the new path. If the new path is not loaded, node is the node that was in * the old path. * @param newPath the new node path * @param newNode the new node, or null if the target directory is not loaded */ void onNodeMoved(PathUtil oldPath, FileTreeNode node, PathUtil newPath, FileTreeNode newNode); /** * Notification that a set of nodes was removed. * * @param oldNodes a list of nodes that we removed. Every node will still have its parent filled */ void onNodesRemoved(JsonArray<FileTreeNode> oldNodes); /** * Notification that a node was replaced (can be either a file or directory). * * @param oldNode the existing node that used to be in the file tree, or null if the workspace * root is being set for the first time * @param newNode the node that replaces the {@code oldNode}. This will be the same * {@link FileTreeNode#getNodeType()} as the node it is replacing. 
*/ void onNodeReplaced(@Nullable FileTreeNode oldNode, FileTreeNode newNode); } /** * A {@link TreeModelChangeListener} which does not perform any operations in * response to an event. Its only purpose is to allow clients to only override * the events matter to them. */ public abstract static class AbstractTreeModelChangeListener implements TreeModelChangeListener { @Override public void onNodeAdded(PathUtil parentDirPath, FileTreeNode newNode) { // intentional no-op, clients should override if needed } @Override public void onNodeMoved( PathUtil oldPath, FileTreeNode node, PathUtil newPath, FileTreeNode newNode) { // intentional no-op, clients should override if needed } @Override public void onNodesRemoved(JsonArray<FileTreeNode> oldNodes) { // intentional no-op, clients should override if needed } @Override public void onNodeReplaced(FileTreeNode oldDir, FileTreeNode newDir) { // intentional no-op, clients should override if needed } } /** * A {@link TreeModelChangeListener} that performs the exact same action in * response to any and all tree mutations. 
*/ public abstract static class BasicTreeModelChangeListener implements TreeModelChangeListener { public abstract void onTreeModelChange(); @Override public void onNodeAdded(PathUtil parentDirPath, FileTreeNode newNode) { onTreeModelChange(); } @Override public void onNodeMoved( PathUtil oldPath, FileTreeNode node, PathUtil newPath, FileTreeNode newNode) { onTreeModelChange(); } @Override public void onNodesRemoved(JsonArray<FileTreeNode> oldNodes) { onTreeModelChange(); } @Override public void onNodeReplaced(FileTreeNode oldDir, FileTreeNode newDir) { onTreeModelChange(); } } private interface ChangeDispatcher { void dispatch(TreeModelChangeListener changeListener); } private final JsoArray<TreeModelChangeListener> modelChangeListeners = JsoArray.create(); private final OutgoingController outgoingNetworkController; private FileTreeNode workspaceRoot; private boolean disableChangeNotifications; /** * Tree revision that corresponds to the revision of the last * successfully applied tree mutation that this client is aware of. */ private String lastAppliedTreeMutationRevision = "0"; public FileTreeModel( FileTreeModelNetworkController.OutgoingController outgoingNetworkController) { this.outgoingNetworkController = outgoingNetworkController; } /** * Adds a node to our model by path. */ public void addNode(PathUtil path, final FileTreeNode newNode, String workspaceRootId) { if (workspaceRoot == null) { // TODO: queue up this add? Log.warn(getClass(), "Attempting to add a node before the root is set", path); return; } // Find the parent directory of the node. final PathUtil parentDirPath = PathUtil.createExcludingLastN(path, 1); FileTreeNode parentDir = getWorkspaceRoot().findChildNode(parentDirPath); if (parentDir != null && parentDir.isComplete()) { // The parent directory is complete, so add the node. addNode(parentDir, newNode, workspaceRootId); } else { // The parent directory isn't complete, so do not add the node to the model, but update the // workspace root id. 
maybeSetLastAppliedTreeMutationRevision(workspaceRootId); } } /** * Adds a node to the model under the specified parent node. */ public void addNode(FileTreeNode parentDir, FileTreeNode childNode, String workspaceRootId) { addNodeNoDispatch(parentDir, childNode); dispatchAddNode(parentDir, childNode, workspaceRootId); } private void addNodeNoDispatch(final FileTreeNode parentDir, final FileTreeNode childNode) { if (parentDir == null) { Log.error(getClass(), "Trying to add a child to a null parent!", childNode); return; } Log.debug(getClass(), "Adding ", childNode, " - to - ", parentDir); parentDir.addChild(childNode); } /** * Manually dispatch that a node was added. */ void dispatchAddNode( final FileTreeNode parentDir, final FileTreeNode childNode, final String workspaceRootId) { dispatchModelChange(new ChangeDispatcher() { @Override public void dispatch(TreeModelChangeListener changeListener) { changeListener.onNodeAdded(parentDir.getNodePath(), childNode); } }, workspaceRootId); } /** * Moves/renames a node in the model. */ public void moveNode( final PathUtil oldPath, final PathUtil newPath, final String workspaceRootId) { if (workspaceRoot == null) { // TODO: queue up this move? Log.warn(getClass(), "Attempting to move a node before the root is set", oldPath); return; } // Remove the node from its old path if the old directory is complete. final FileTreeNode oldNode = workspaceRoot.findChildNode(oldPath); if (oldNode == null) { /* * No node found at the old path - either it isn't loaded, or we optimistically updated * already. Verify that one of those is the case. */ Preconditions.checkState(workspaceRoot.findClosestChildNode(oldPath) != null || workspaceRoot.findChildNode(newPath) != null); } else { oldNode.setName(newPath.getBaseName()); oldNode.getParent().removeChild(oldNode); } // Apply the new root id. maybeSetLastAppliedTreeMutationRevision(workspaceRootId); // Prepare a callback that will dispatch the onNodeMove event to listeners. 
NodeRequestCallback callback = new NodeRequestCallback() { @Override public void onNodeAvailable(FileTreeNode newNode) { /* * If we had to request the target directory, replace the target node with the oldNode to * ensure that all properties (such as the rendered node and the fileEditSessionKey) are * copied over correctly. */ if (oldNode != null && newNode != null && newNode != oldNode) { newNode.replaceWith(oldNode); newNode = oldNode; } // Dispatch a change event. final FileTreeNode finalNewNode = newNode; dispatchModelChange(new ChangeDispatcher() { @Override public void dispatch(TreeModelChangeListener changeListener) { changeListener.onNodeMoved(oldPath, oldNode, newPath, finalNewNode); } }, workspaceRootId); } @Override public void onNodeUnavailable() { // The node should be available because we are requesting the node using the root ID // immediately after the move. Log.error(getClass(), "Could not find moved node using the workspace root ID immediately after the move"); } @Override public void onError(FailureReason reason) { // Error already logged. } }; // Request the target directory. final PathUtil parentDirPath = PathUtil.createExcludingLastN(newPath, 1); FileTreeNode parentDir = workspaceRoot.findChildNode(parentDirPath); if (parentDir == null || !parentDir.isComplete()) { if (oldNode == null) { // Early exit if neither the old node nor the target directory is loaded. return; } else { // If the parent directory was not loaded, don't bother loading it. callback.onNodeAvailable(null); } } else { if (oldNode == null) { // The old node doesn't exist, so we need to force a refresh of the target directory's // children by marking the target directory incomplete. DirInfoImpl parentDirView = parentDir.cast(); parentDirView.setIsComplete(false); } else { // The old node exists and the target directory is loaded, so add the node to the target. parentDir.addChild(oldNode); } // Request the new node. 
requestWorkspaceNode(newPath, callback); } } /** * Removes a node from the model. * * @param toDelete the {@link FileTreeNode} we want to remove. * @param workspaceRootId the new file tree revision * @return the node that was deleted from the model. This will return * {@code null} if the input node is null or if the input node does * not have a parent. Meaning if the input node is the root, this * method will return {@code null}. */ public FileTreeNode removeNode(final FileTreeNode toDelete, String workspaceRootId) { // If we found a node at the specified path, then remove it. if (deleteNodeNoDispatch(toDelete)) { final JsonArray<FileTreeNode> deletedNode = JsonCollections.createArray(toDelete); dispatchModelChange(new ChangeDispatcher() { @Override public void dispatch(TreeModelChangeListener changeListener) { changeListener.onNodesRemoved(deletedNode); } }, workspaceRootId); return toDelete; } return null; } /** * Removes a set of nodes from the model. * * @param toDelete the {@link PathUtil}s for the nodes we want to remove. * @param workspaceRootId the new file tree revision * @return the nodes that were deleted from the model. This will return an * empty list if we try to add a node before we have a root node set, * or if the specified path does not exist.. */ public JsonArray<FileTreeNode> removeNodes( final JsonArray<PathUtil> toDelete, String workspaceRootId) { if (workspaceRoot == null) { // TODO: queue up this remove? Log.warn(getClass(), "Attempting to remove nodes before the root is set"); return null; } final JsonArray<FileTreeNode> deletedNodes = JsonCollections.createArray(); for (int i = 0; i < toDelete.size(); i++) { FileTreeNode node = workspaceRoot.findChildNode(toDelete.get(i)); if (deleteNodeNoDispatch(node)) { deletedNodes.add(node); } } if (deletedNodes.size() == 0) { // if none of the nodes created a need to update the UI, just return an // empty list. 
return deletedNodes; } dispatchModelChange(new ChangeDispatcher() { @Override public void dispatch(TreeModelChangeListener changeListener) { changeListener.onNodesRemoved(deletedNodes); } }, workspaceRootId); return deletedNodes; } /** * Deletes a single node (does not update the UI). */ private boolean deleteNodeNoDispatch(FileTreeNode node) { if (node == null || node.getParent() == null) { return false; } FileTreeNode parent = node.getParent(); // Guard against someone installing a node of the same name in the parent // (meaning we are already gone. if (!node.equals(parent.getChildNode(node.getName()))) { // This means that the node we are removing from the tree is already // effectively removed from where it thinks it is. return false; } node.getParent().removeChild(node); return true; } /** * Replaces either the root node for this tree model, or replaces an existing directory node, or * replaces an existing file node. */ public void replaceNode(PathUtil path, final FileTreeNode newNode, String workspaceRootId) { if (newNode == null) { return; } if (PathUtil.WORKSPACE_ROOT.equals(path)) { // Install the workspace root. final FileTreeNode oldDir = workspaceRoot; workspaceRoot = newNode; dispatchModelChange(new ChangeDispatcher() { @Override public void dispatch(TreeModelChangeListener changeListener) { changeListener.onNodeReplaced(oldDir, newNode); } }, workspaceRootId); } else { // Patch the model if there is one. if (workspaceRoot != null) { final FileTreeNode nodeToReplace = workspaceRoot.findChildNode(path); // Note. We do not support patching subtrees that don't already // exist. This subtree must have already existed, or have been // preceded by an ADD or COPY mutation. 
if (nodeToReplace == null) { return; } nodeToReplace.replaceWith(newNode); dispatchModelChange(new ChangeDispatcher() { @Override public void dispatch(TreeModelChangeListener changeListener) { changeListener.onNodeReplaced(nodeToReplace, newNode); } }, workspaceRootId); } } } /** * @return the current value of the workspaceRoot. Potentially {@code null} if * the model has not yet been populated. */ public FileTreeNode getWorkspaceRoot() { return workspaceRoot; } /** * Asks for the root node, potentially asynchronously if the model is not yet * populated. If the root node is already available then the callback will be * invoked synchronously. */ public void requestWorkspaceRoot(final RootNodeRequestCallback callback) { FileTreeNode rootNode = getWorkspaceRoot(); if (rootNode == null) { // Wait for the model to be populated. addModelChangeListener(new AbstractTreeModelChangeListener() { @Override public void onNodeReplaced(FileTreeNode oldNode, FileTreeNode newNode) { Preconditions.checkArgument(newNode.getNodePath().equals(PathUtil.WORKSPACE_ROOT), "Unexpected non-workspace root subtree replaced before workspace root was replaced: " + newNode.toString()); // Should be resilient to concurrent modification! removeModelChangeListener(this); callback.onRootNodeAvailable(getWorkspaceRoot()); } }); return; } callback.onRootNodeAvailable(rootNode); } /** * Adds a {@link TreeModelChangeListener} to be notified of mutations applied * by the FileTreeController to the underlying workspace file tree model. * * @param modelChangeListener the listener we are adding */ public void addModelChangeListener(TreeModelChangeListener modelChangeListener) { modelChangeListeners.add(modelChangeListener); } /** * Removes a {@link TreeModelChangeListener} from the set of listeners * subscribed to model changes. 
*/ public void removeModelChangeListener(TreeModelChangeListener modelChangeListener) { modelChangeListeners.remove(modelChangeListener); } public void setDisableChangeNotifications(boolean disable) { this.disableChangeNotifications = disable; } private void dispatchModelChange(ChangeDispatcher dispatcher, String workspaceRootId) { // Update the tracked tip ID. maybeSetLastAppliedTreeMutationRevision(workspaceRootId); if (disableChangeNotifications) { return; } JsoArray<TreeModelChangeListener> copy = modelChangeListeners.slice( 0, modelChangeListeners.size()); for (int i = 0, n = copy.size(); i < n; i++) { dispatcher.dispatch(copy.get(i)); } } /** * @return the file tree revision associated with the last seen Tree mutation. */ public String getLastAppliedTreeMutationRevision() { return lastAppliedTreeMutationRevision; } /** * Bumps the tracked Root ID for the last applied tree mutation, if the * version happens to be larger than the version we are tracking. */ public void maybeSetLastAppliedTreeMutationRevision(String lastAppliedTreeMutationRevision) { // TODO: Ensure numeric comparison survives ID obfuscation. try { long newRevision = StringUtils.toLong(lastAppliedTreeMutationRevision); long lastRevision = StringUtils.toLong(this.lastAppliedTreeMutationRevision); this.lastAppliedTreeMutationRevision = (newRevision > lastRevision) ? lastAppliedTreeMutationRevision : this.lastAppliedTreeMutationRevision; // TODO: this should be monotonically increasing; if it's not, we missed an update. } catch (NumberFormatException e) { Log.error(getClass(), "Root ID is not a numeric long!", lastAppliedTreeMutationRevision); } } /** * Folks that want to mutate the file tree should obtain a skeletal {@link WorkspaceTreeUpdate} * using this factory method. 
*/ public WorkspaceTreeUpdateImpl makeEmptyTreeUpdate() { if (this.lastAppliedTreeMutationRevision == null) { throw new IllegalStateException( "Attempted to mutate the tree before the workspace file tree was loaded at least once!"); } return WorkspaceTreeUpdateImpl.make() .setAuthorClientId(BootstrapSession.getBootstrapSession().getActiveClientId()) .setMutations(JsoArray.<Mutation>create()); } /** * Calculates the list of expanded paths. The list only contains the paths of the deepest expanded * directories. Parent directories are assumed to be open as well. * * @return the list of expanded paths, or null if the workspace root is not loaded */ public JsoArray<String> calculateExpandedPaths() { // Early exit if the root isn't loaded yet. if (workspaceRoot == null) { return null; } // Walk the tree looking for expanded paths. JsoArray<String> expandedPaths = JsoArray.create(); calculateExpandedPathsRecursive(workspaceRoot, expandedPaths); return expandedPaths; } /** * Calculates the list of expanded paths beneath the specified node and adds them to expanded * path. If none of the children * * @param node the directory containing the expanded paths * @param expandedPaths the running list of expanded paths */ private void calculateExpandedPathsRecursive(FileTreeNode node, JsoArray<String> expandedPaths) { assert node.isDirectory() : "node must be a directory"; // Check if the directory is expanded. The root is always expanded. if (node != workspaceRoot) { TreeNodeElement<FileTreeNode> dirElem = node.getRenderedTreeNode(); if (!dirElem.isOpen()) { return; } } // Recursively search for expanded subdirectories. int expandedPathsCount = expandedPaths.size(); DirInfoImpl dir = node.cast(); JsonArray<DirInfo> subDirs = dir.getSubDirectories(); if (subDirs != null) { for (int i = 0; i < subDirs.size(); i++) { DirInfo subDir = subDirs.get(i); calculateExpandedPathsRecursive((FileTreeNode) subDir, expandedPaths); } } // Add this directory if none of its descendants were added. 
if (expandedPathsCount == expandedPaths.size()) { expandedPaths.add(node.getNodePath().getPathString()); } } /** * Asks for the node at the specified path, potentially asynchronously if the model does not yet * contain the node. If the node is already available then the callback will be invoked * synchronously. * * @param path the path to the node, which must be a file (not a directory) * @param callback the callback to invoke when the node is ready */ public void requestWorkspaceNode(final PathUtil path, final NodeRequestCallback callback) { outgoingNetworkController.requestWorkspaceNode(this, path, callback); } /** * Asks for the children of the specified node. * * @param node a directory node * @param callback an optional callback that will be notified once the children are fetched. If * null, this method will alert the user if there was an error */ public void requestDirectoryChildren(FileTreeNode node, @Nullable final NodeRequestCallback callback) { outgoingNetworkController.requestDirectoryChildren(this, node, callback); } }
apache-2.0
kewu1992/test-infra
prow/plugins/trigger/trigger.go
2751
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package trigger import ( "fmt" "github.com/Sirupsen/logrus" "k8s.io/test-infra/prow/config" "k8s.io/test-infra/prow/github" "k8s.io/test-infra/prow/kube" "k8s.io/test-infra/prow/plugins" ) const ( pluginName = "trigger" lgtmLabel = "lgtm" ) func init() { plugins.RegisterIssueCommentHandler(pluginName, handleIssueComment) plugins.RegisterPullRequestHandler(pluginName, handlePullRequest) plugins.RegisterPushEventHandler(pluginName, handlePush) } type githubClient interface { AddLabel(org, repo string, number int, label string) error BotName() string IsMember(org, user string) (bool, error) GetPullRequest(org, repo string, number int) (*github.PullRequest, error) GetRef(org, repo, ref string) (string, error) CreateComment(owner, repo string, number int, comment string) error ListIssueComments(owner, repo string, issue int) ([]github.IssueComment, error) CreateStatus(owner, repo, ref string, status github.Status) error GetCombinedStatus(org, repo, ref string) (*github.CombinedStatus, error) GetPullRequestChanges(org, repo string, number int) ([]github.PullRequestChange, error) RemoveLabel(org, repo string, number int, label string) error } type kubeClient interface { CreateProwJob(kube.ProwJob) (kube.ProwJob, error) } type client struct { GitHubClient githubClient KubeClient kubeClient Config *config.Config Logger *logrus.Entry } func triggerConfig(c *config.Config, org, repo string) *config.Trigger { for _, tr := 
range c.Triggers { for _, r := range tr.Repos { if r == org || r == fmt.Sprintf("%s/%s", org, repo) { return &tr } } } return nil } func getClient(pc plugins.PluginClient) client { return client{ GitHubClient: pc.GitHubClient, Config: pc.Config, KubeClient: pc.KubeClient, Logger: pc.Logger, } } func handlePullRequest(pc plugins.PluginClient, pr github.PullRequestEvent) error { return handlePR(getClient(pc), pr) } func handleIssueComment(pc plugins.PluginClient, ic github.IssueCommentEvent) error { return handleIC(getClient(pc), ic) } func handlePush(pc plugins.PluginClient, pe github.PushEvent) error { return handlePE(getClient(pc), pe) }
apache-2.0
chanakaudaya/developer-studio
esb/org.wso2.developerstudio.eclipse.gmf.esb/src/org/wso2/developerstudio/eclipse/gmf/esb/LocalEntry.java
5960
/** * <copyright> * </copyright> * * $Id$ */ package org.wso2.developerstudio.eclipse.gmf.esb; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Local Entry</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * <ul> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getEntryName <em>Entry Name</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getValueType <em>Value Type</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getValueLiteral <em>Value Literal</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getValueXML <em>Value XML</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getValueURL <em>Value URL</em>}</li> * </ul> * </p> * * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getLocalEntry() * @model * @generated */ public interface LocalEntry extends EsbElement { /** * Returns the value of the '<em><b>Entry Name</b></em>' attribute. * The default value is <code>"entry_name"</code>. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Entry Name</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Entry Name</em>' attribute. * @see #setEntryName(String) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getLocalEntry_EntryName() * @model default="entry_name" * @generated */ String getEntryName(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getEntryName <em>Entry Name</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Entry Name</em>' attribute. * @see #getEntryName() * @generated */ void setEntryName(String value); /** * Returns the value of the '<em><b>Value Type</b></em>' attribute. * The default value is <code>"LITERAL"</code>. 
* The literals are from the enumeration {@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntryValueType}. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Value Type</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Value Type</em>' attribute. * @see org.wso2.developerstudio.eclipse.gmf.esb.LocalEntryValueType * @see #setValueType(LocalEntryValueType) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getLocalEntry_ValueType() * @model default="LITERAL" * @generated */ LocalEntryValueType getValueType(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getValueType <em>Value Type</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Value Type</em>' attribute. * @see org.wso2.developerstudio.eclipse.gmf.esb.LocalEntryValueType * @see #getValueType() * @generated */ void setValueType(LocalEntryValueType value); /** * Returns the value of the '<em><b>Value Literal</b></em>' attribute. * The default value is <code>"entry_value"</code>. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Value Literal</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Value Literal</em>' attribute. * @see #setValueLiteral(String) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getLocalEntry_ValueLiteral() * @model default="entry_value" * @generated */ String getValueLiteral(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getValueLiteral <em>Value Literal</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Value Literal</em>' attribute. 
* @see #getValueLiteral() * @generated */ void setValueLiteral(String value); /** * Returns the value of the '<em><b>Value XML</b></em>' attribute. * The default value is <code>"<value/>"</code>. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Value XML</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Value XML</em>' attribute. * @see #setValueXML(String) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getLocalEntry_ValueXML() * @model default="<value/>" * @generated */ String getValueXML(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getValueXML <em>Value XML</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Value XML</em>' attribute. * @see #getValueXML() * @generated */ void setValueXML(String value); /** * Returns the value of the '<em><b>Value URL</b></em>' attribute. * The default value is <code>"file:/path/to/resource.ext"</code>. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Value URL</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Value URL</em>' attribute. * @see #setValueURL(String) * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getLocalEntry_ValueURL() * @model default="file:/path/to/resource.ext" * @generated */ String getValueURL(); /** * Sets the value of the '{@link org.wso2.developerstudio.eclipse.gmf.esb.LocalEntry#getValueURL <em>Value URL</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Value URL</em>' attribute. * @see #getValueURL() * @generated */ void setValueURL(String value); } // LocalEntry
apache-2.0
MobileRez/XFXamlClass
Lab Materials/Part 1/Completed/MeasurementConverter/MeasurementConverter.Droid/obj/Debug/android/src/md5b60ffeb829f638581ab2bb9b1a7f4f3f/ButtonRenderer.java
3388
package md5b60ffeb829f638581ab2bb9b1a7f4f3f; public class ButtonRenderer extends md5b60ffeb829f638581ab2bb9b1a7f4f3f.ViewRenderer_2 implements mono.android.IGCUserPeer, android.view.View.OnAttachStateChangeListener { static final String __md_methods; static { __md_methods = "n_onViewAttachedToWindow:(Landroid/view/View;)V:GetOnViewAttachedToWindow_Landroid_view_View_Handler:Android.Views.View/IOnAttachStateChangeListenerInvoker, Mono.Android, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null\n" + "n_onViewDetachedFromWindow:(Landroid/view/View;)V:GetOnViewDetachedFromWindow_Landroid_view_View_Handler:Android.Views.View/IOnAttachStateChangeListenerInvoker, Mono.Android, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null\n" + ""; mono.android.Runtime.register ("Xamarin.Forms.Platform.Android.ButtonRenderer, Xamarin.Forms.Platform.Android, Version=2.0.0.0, Culture=neutral, PublicKeyToken=null", ButtonRenderer.class, __md_methods); } public ButtonRenderer (android.content.Context p0) throws java.lang.Throwable { super (p0); if (getClass () == ButtonRenderer.class) mono.android.TypeManager.Activate ("Xamarin.Forms.Platform.Android.ButtonRenderer, Xamarin.Forms.Platform.Android, Version=2.0.0.0, Culture=neutral, PublicKeyToken=null", "Android.Content.Context, Mono.Android, Version=0.0.0.0, Culture=neutral, PublicKeyToken=84e04ff9cfb79065", this, new java.lang.Object[] { p0 }); } public ButtonRenderer (android.content.Context p0, android.util.AttributeSet p1) throws java.lang.Throwable { super (p0, p1); if (getClass () == ButtonRenderer.class) mono.android.TypeManager.Activate ("Xamarin.Forms.Platform.Android.ButtonRenderer, Xamarin.Forms.Platform.Android, Version=2.0.0.0, Culture=neutral, PublicKeyToken=null", "Android.Content.Context, Mono.Android, Version=0.0.0.0, Culture=neutral, PublicKeyToken=84e04ff9cfb79065:Android.Util.IAttributeSet, Mono.Android, Version=0.0.0.0, Culture=neutral, PublicKeyToken=84e04ff9cfb79065", this, new java.lang.Object[] { p0, p1 }); 
} public ButtonRenderer (android.content.Context p0, android.util.AttributeSet p1, int p2) throws java.lang.Throwable { super (p0, p1, p2); if (getClass () == ButtonRenderer.class) mono.android.TypeManager.Activate ("Xamarin.Forms.Platform.Android.ButtonRenderer, Xamarin.Forms.Platform.Android, Version=2.0.0.0, Culture=neutral, PublicKeyToken=null", "Android.Content.Context, Mono.Android, Version=0.0.0.0, Culture=neutral, PublicKeyToken=84e04ff9cfb79065:Android.Util.IAttributeSet, Mono.Android, Version=0.0.0.0, Culture=neutral, PublicKeyToken=84e04ff9cfb79065:System.Int32, mscorlib, Version=2.0.5.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e", this, new java.lang.Object[] { p0, p1, p2 }); } public void onViewAttachedToWindow (android.view.View p0) { n_onViewAttachedToWindow (p0); } private native void n_onViewAttachedToWindow (android.view.View p0); public void onViewDetachedFromWindow (android.view.View p0) { n_onViewDetachedFromWindow (p0); } private native void n_onViewDetachedFromWindow (android.view.View p0); java.util.ArrayList refList; public void monodroidAddReference (java.lang.Object obj) { if (refList == null) refList = new java.util.ArrayList (); refList.add (obj); } public void monodroidClearReferences () { if (refList != null) refList.clear (); } }
apache-2.0
TommesDee/cpachecker
test/programs/parser/cbmc/Endianness5/main.c
205
unsigned char regb[100]; unsigned short *ptrUShort; unsigned short shortTmp; int main() { ptrUShort = (unsigned short*)(&regb[12]); shortTmp= *ptrUShort; // should pass *ptrUShort = 1234; }
apache-2.0
mhurne/aws-sdk-java
aws-java-sdk-cloudfront/src/main/java/com/amazonaws/services/cloudfront/model/CacheBehaviors.java
6768
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.cloudfront.model; import java.io.Serializable; /** * A complex type that contains zero or more CacheBehavior elements. */ public class CacheBehaviors implements Serializable, Cloneable { /** The number of cache behaviors for this distribution. */ private Integer quantity; /** * Optional: A complex type that contains cache behaviors for this * distribution. If Quantity is 0, you can omit Items. */ private com.amazonaws.internal.SdkInternalList<CacheBehavior> items; /** * The number of cache behaviors for this distribution. * * @param quantity * The number of cache behaviors for this distribution. */ public void setQuantity(Integer quantity) { this.quantity = quantity; } /** * The number of cache behaviors for this distribution. * * @return The number of cache behaviors for this distribution. */ public Integer getQuantity() { return this.quantity; } /** * The number of cache behaviors for this distribution. * * @param quantity * The number of cache behaviors for this distribution. * @return Returns a reference to this object so that method calls can be * chained together. */ public CacheBehaviors withQuantity(Integer quantity) { setQuantity(quantity); return this; } /** * Optional: A complex type that contains cache behaviors for this * distribution. If Quantity is 0, you can omit Items. 
* * @return Optional: A complex type that contains cache behaviors for this * distribution. If Quantity is 0, you can omit Items. */ public java.util.List<CacheBehavior> getItems() { if (items == null) { items = new com.amazonaws.internal.SdkInternalList<CacheBehavior>(); } return items; } /** * Optional: A complex type that contains cache behaviors for this * distribution. If Quantity is 0, you can omit Items. * * @param items * Optional: A complex type that contains cache behaviors for this * distribution. If Quantity is 0, you can omit Items. */ public void setItems(java.util.Collection<CacheBehavior> items) { if (items == null) { this.items = null; return; } this.items = new com.amazonaws.internal.SdkInternalList<CacheBehavior>( items); } /** * Optional: A complex type that contains cache behaviors for this * distribution. If Quantity is 0, you can omit Items. * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setItems(java.util.Collection)} or * {@link #withItems(java.util.Collection)} if you want to override the * existing values. * </p> * * @param items * Optional: A complex type that contains cache behaviors for this * distribution. If Quantity is 0, you can omit Items. * @return Returns a reference to this object so that method calls can be * chained together. */ public CacheBehaviors withItems(CacheBehavior... items) { if (this.items == null) { setItems(new com.amazonaws.internal.SdkInternalList<CacheBehavior>( items.length)); } for (CacheBehavior ele : items) { this.items.add(ele); } return this; } /** * Optional: A complex type that contains cache behaviors for this * distribution. If Quantity is 0, you can omit Items. * * @param items * Optional: A complex type that contains cache behaviors for this * distribution. If Quantity is 0, you can omit Items. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public CacheBehaviors withItems(java.util.Collection<CacheBehavior> items) { setItems(items); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getQuantity() != null) sb.append("Quantity: " + getQuantity() + ","); if (getItems() != null) sb.append("Items: " + getItems()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CacheBehaviors == false) return false; CacheBehaviors other = (CacheBehaviors) obj; if (other.getQuantity() == null ^ this.getQuantity() == null) return false; if (other.getQuantity() != null && other.getQuantity().equals(this.getQuantity()) == false) return false; if (other.getItems() == null ^ this.getItems() == null) return false; if (other.getItems() != null && other.getItems().equals(this.getItems()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getQuantity() == null) ? 0 : getQuantity().hashCode()); hashCode = prime * hashCode + ((getItems() == null) ? 0 : getItems().hashCode()); return hashCode; } @Override public CacheBehaviors clone() { try { return (CacheBehaviors) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
apache-2.0
ern/elasticsearch
client/rest-high-level/src/main/java/org/elasticsearch/client/security/GetUserPrivilegesResponse.java
5778
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.client.security; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.security.user.privileges.ApplicationResourcePrivileges; import org.elasticsearch.client.security.user.privileges.GlobalPrivileges; import org.elasticsearch.client.security.user.privileges.UserIndicesPrivileges; import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.Objects; import java.util.Set; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; /** * The response for the {@link org.elasticsearch.client.SecurityClient#getUserPrivileges(RequestOptions)} API. 
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user-privileges.html">the API docs</a> */ public class GetUserPrivilegesResponse { private static final ConstructingObjectParser<GetUserPrivilegesResponse, Void> PARSER = new ConstructingObjectParser<>( "get_user_privileges_response", true, GetUserPrivilegesResponse::buildResponseFromParserArgs); @SuppressWarnings("unchecked") private static GetUserPrivilegesResponse buildResponseFromParserArgs(Object[] args) { return new GetUserPrivilegesResponse( (Collection<String>) args[0], (Collection<GlobalPrivileges>) args[1], (Collection<UserIndicesPrivileges>) args[2], (Collection<ApplicationResourcePrivileges>) args[3], (Collection<String>) args[4] ); } static { PARSER.declareStringArray(constructorArg(), new ParseField("cluster")); PARSER.declareObjectArray(constructorArg(), (parser, ignore) -> GlobalPrivileges.fromXContent(parser), new ParseField("global")); PARSER.declareObjectArray(constructorArg(), (parser, ignore) -> UserIndicesPrivileges.fromXContent(parser), new ParseField("indices")); PARSER.declareObjectArray(constructorArg(), (parser, ignore) -> ApplicationResourcePrivileges.fromXContent(parser), new ParseField("applications")); PARSER.declareStringArray(constructorArg(), new ParseField("run_as")); } public static GetUserPrivilegesResponse fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } private Set<String> clusterPrivileges; private Set<GlobalPrivileges> globalPrivileges; private Set<UserIndicesPrivileges> indicesPrivileges; private Set<ApplicationResourcePrivileges> applicationPrivileges; private Set<String> runAsPrivilege; public GetUserPrivilegesResponse(Collection<String> clusterPrivileges, Collection<GlobalPrivileges> globalPrivileges, Collection<UserIndicesPrivileges> indicesPrivileges, Collection<ApplicationResourcePrivileges> applicationPrivileges, Collection<String> runAsPrivilege) { this.clusterPrivileges = 
Collections.unmodifiableSet(new LinkedHashSet<>(clusterPrivileges)); this.globalPrivileges = Collections.unmodifiableSet(new LinkedHashSet<>(globalPrivileges)); this.indicesPrivileges = Collections.unmodifiableSet(new LinkedHashSet<>(indicesPrivileges)); this.applicationPrivileges = Collections.unmodifiableSet(new LinkedHashSet<>(applicationPrivileges)); this.runAsPrivilege = Collections.unmodifiableSet(new LinkedHashSet<>(runAsPrivilege)); } public Set<String> getClusterPrivileges() { return clusterPrivileges; } public Set<GlobalPrivileges> getGlobalPrivileges() { return globalPrivileges; } public Set<UserIndicesPrivileges> getIndicesPrivileges() { return indicesPrivileges; } public Set<ApplicationResourcePrivileges> getApplicationPrivileges() { return applicationPrivileges; } public Set<String> getRunAsPrivilege() { return runAsPrivilege; } @Override public String toString() { return "GetUserPrivilegesResponse{" + "clusterPrivileges=" + clusterPrivileges + ", globalPrivileges=" + globalPrivileges + ", indicesPrivileges=" + indicesPrivileges + ", applicationPrivileges=" + applicationPrivileges + ", runAsPrivilege=" + runAsPrivilege + '}'; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final GetUserPrivilegesResponse that = (GetUserPrivilegesResponse) o; return Objects.equals(this.clusterPrivileges, that.clusterPrivileges) && Objects.equals(this.globalPrivileges, that.globalPrivileges) && Objects.equals(this.indicesPrivileges, that.indicesPrivileges) && Objects.equals(this.applicationPrivileges, that.applicationPrivileges) && Objects.equals(this.runAsPrivilege, that.runAsPrivilege); } @Override public int hashCode() { return Objects.hash(clusterPrivileges, globalPrivileges, indicesPrivileges, applicationPrivileges, runAsPrivilege); } }
apache-2.0
yangleo/cloud-github
openstack_dashboard/test/test_data/cinder_data.py
16279
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from cinderclient.v2 import availability_zones
from cinderclient.v2 import consistencygroups
from cinderclient.v2 import pools
from cinderclient.v2 import qos_specs
from cinderclient.v2 import quotas
from cinderclient.v2 import services
from cinderclient.v2 import volume_backups as vol_backups
from cinderclient.v2 import volume_encryption_types as vol_enc_types
from cinderclient.v2 import volume_snapshots as vol_snaps
from cinderclient.v2 import volume_transfers
from cinderclient.v2 import volume_types
from cinderclient.v2 import volumes

from openstack_dashboard import api
from openstack_dashboard.usage import quotas as usage_quotas

from openstack_dashboard.test.test_data import utils


def data(TEST):
    """Populate ``TEST`` with canned Cinder (block storage) fixture data.

    Fills a set of ``TestDataContainer`` attributes on ``TEST`` (services,
    volumes, snapshots, backups, quotas, QoS specs, transfers, pools,
    consistency groups, ...) with hand-built cinderclient v2 objects so
    dashboard tests can run without a real Cinder endpoint.  All IDs and
    values below are arbitrary fixtures; cross-references between objects
    (e.g. snapshot.volume_id) must stay consistent with each other.
    """
    # One container per Cinder resource type exercised by the tests.
    TEST.cinder_services = utils.TestDataContainer()
    TEST.cinder_volumes = utils.TestDataContainer()
    TEST.cinder_volume_backups = utils.TestDataContainer()
    TEST.cinder_volume_encryption_types = utils.TestDataContainer()
    TEST.cinder_volume_types = utils.TestDataContainer()
    TEST.cinder_volume_encryption = utils.TestDataContainer()
    TEST.cinder_bootable_volumes = utils.TestDataContainer()
    TEST.cinder_qos_specs = utils.TestDataContainer()
    TEST.cinder_qos_spec_associations = utils.TestDataContainer()
    TEST.cinder_volume_snapshots = utils.TestDataContainer()
    TEST.cinder_quotas = utils.TestDataContainer()
    TEST.cinder_quota_usages = utils.TestDataContainer()
    TEST.cinder_availability_zones = utils.TestDataContainer()
    TEST.cinder_volume_transfers = utils.TestDataContainer()
    TEST.cinder_pools = utils.TestDataContainer()
    TEST.cinder_consistencygroups = utils.TestDataContainer()
    TEST.cinder_cgroup_volumes = utils.TestDataContainer()

    # Services
    service_1 = services.Service(services.ServiceManager(None), {
        "service": "cinder-scheduler",
        "status": "enabled",
        "binary": "cinder-scheduler",
        "zone": "internal",
        "state": "up",
        "updated_at": "2013-07-08T05:21:00.000000",
        "host": "devstack001",
        "disabled_reason": None
    })

    service_2 = services.Service(services.ServiceManager(None), {
        "service": "cinder-volume",
        "status": "enabled",
        "binary": "cinder-volume",
        "zone": "nova",
        "state": "up",
        "updated_at": "2013-07-08T05:20:51.000000",
        "host": "devstack001",
        "disabled_reason": None
    })
    TEST.cinder_services.add(service_1)
    TEST.cinder_services.add(service_2)

    # Volumes - Cinder v1
    volume = volumes.Volume(
        volumes.VolumeManager(None),
        {'id': "11023e92-8008-4c8b-8059-7f2293ff3887",
         'status': 'available',
         'size': 40,
         'display_name': 'Volume name',
         'display_description': 'Volume description',
         'created_at': '2014-01-27 10:30:00',
         'volume_type': None,
         'attachments': []})
    nameless_volume = volumes.Volume(
        volumes.VolumeManager(None),
        {"id": "4b069dd0-6eaa-4272-8abc-5448a68f1cce",
         "status": 'available',
         "size": 10,
         "display_name": '',
         "display_description": '',
         "device": "/dev/hda",
         "created_at": '2010-11-21 18:34:25',
         "volume_type": 'vol_type_1',
         "attachments": []})
    other_volume = volumes.Volume(
        volumes.VolumeManager(None),
        {'id': "21023e92-8008-1234-8059-7f2293ff3889",
         'status': 'in-use',
         'size': 10,
         'display_name': u'my_volume',
         'display_description': '',
         'created_at': '2013-04-01 10:30:00',
         'volume_type': None,
         'attachments': [{"id": "1", "server_id": '1',
                          "device": "/dev/hda"}]})
    volume_with_type = volumes.Volume(
        volumes.VolumeManager(None),
        {'id': "7dcb47fd-07d9-42c2-9647-be5eab799ebe",
         'name': 'my_volume2',
         'status': 'in-use',
         'size': 10,
         'display_name': u'my_volume2',
         'display_description': '',
         'created_at': '2013-04-01 10:30:00',
         'volume_type': 'vol_type_2',
         'attachments': [{"id": "2", "server_id": '2',
                          "device": "/dev/hdb"}]})
    non_bootable_volume = volumes.Volume(
        volumes.VolumeManager(None),
        {'id': "21023e92-8008-1234-8059-7f2293ff3890",
         'status': 'in-use',
         'size': 10,
         'display_name': u'my_volume',
         'display_description': '',
         'created_at': '2013-04-01 10:30:00',
         'volume_type': None,
         'bootable': False,
         'attachments': [{"id": "1", "server_id": '1',
                          "device": "/dev/hda"}]})

    # Bootable flags are Cinder-style string booleans, not Python bools.
    volume.bootable = 'true'
    nameless_volume.bootable = 'true'
    other_volume.bootable = 'true'

    TEST.cinder_volumes.add(api.cinder.Volume(volume))
    TEST.cinder_volumes.add(api.cinder.Volume(nameless_volume))
    TEST.cinder_volumes.add(api.cinder.Volume(other_volume))
    TEST.cinder_volumes.add(api.cinder.Volume(volume_with_type))
    TEST.cinder_bootable_volumes.add(api.cinder.Volume(non_bootable_volume))

    vol_type1 = volume_types.VolumeType(volume_types.VolumeTypeManager(None),
                                        {'id': u'1',
                                         'name': u'vol_type_1',
                                         'description': 'type 1 description',
                                         'extra_specs': {'foo': 'bar'}})
    vol_type2 = volume_types.VolumeType(volume_types.VolumeTypeManager(None),
                                        {'id': u'2',
                                         'name': u'vol_type_2',
                                         'description': 'type 2 description'})
    TEST.cinder_volume_types.add(vol_type1, vol_type2)

    # Volumes - Cinder v2
    volume_v2 = volumes.Volume(
        volumes.VolumeManager(None),
        {'id': "31023e92-8008-4c8b-8059-7f2293ff1234",
         'name': 'v2_volume',
         'description': "v2 Volume Description",
         'status': 'available',
         'size': 20,
         'created_at': '2014-01-27 10:30:00',
         'volume_type': None,
         'os-vol-host-attr:host': 'host@backend-name#pool',
         'bootable': 'true',
         'attachments': []})
    volume_v2.bootable = 'true'

    TEST.cinder_volumes.add(api.cinder.Volume(volume_v2))

    snapshot = vol_snaps.Snapshot(
        vol_snaps.SnapshotManager(None),
        {'id': '5f3d1c33-7d00-4511-99df-a2def31f3b5d',
         'display_name': 'test snapshot',
         'display_description': 'volume snapshot',
         'size': 40,
         'status': 'available',
         'volume_id': '11023e92-8008-4c8b-8059-7f2293ff3887'})
    snapshot2 = vol_snaps.Snapshot(
        vol_snaps.SnapshotManager(None),
        {'id': 'c9d0881a-4c0b-4158-a212-ad27e11c2b0f',
         'name': '',
         'description': 'v2 volume snapshot description',
         'size': 80,
         'status': 'available',
         'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})
    snapshot3 = vol_snaps.Snapshot(
        vol_snaps.SnapshotManager(None),
        {'id': 'c9d0881a-4c0b-4158-a212-ad27e11c2b0e',
         'name': '',
         'description': 'v2 volume snapshot description 2',
         'size': 80,
         'status': 'available',
         'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})

    snapshot.bootable = 'true'
    snapshot2.bootable = 'true'

    TEST.cinder_volume_snapshots.add(api.cinder.VolumeSnapshot(snapshot))
    TEST.cinder_volume_snapshots.add(api.cinder.VolumeSnapshot(snapshot2))
    TEST.cinder_volume_snapshots.add(api.cinder.VolumeSnapshot(snapshot3))
    # Link the first snapshot back to its source volume for display tests.
    TEST.cinder_volume_snapshots.first()._volume = volume

    # Volume Type Encryption
    vol_enc_type1 = vol_enc_types.VolumeEncryptionType(
        vol_enc_types.VolumeEncryptionTypeManager(None),
        {'volume_type_id': u'1',
         'control_location': "front-end",
         'key_size': 512,
         'provider': "a-provider",
         'cipher': "a-cipher"})
    vol_enc_type2 = vol_enc_types.VolumeEncryptionType(
        vol_enc_types.VolumeEncryptionTypeManager(None),
        {'volume_type_id': u'2',
         'control_location': "front-end",
         'key_size': 256,
         'provider': "a-provider",
         'cipher': "a-cipher"})
    # Empty dict models a volume type with no encryption configured.
    vol_unenc_type1 = vol_enc_types.VolumeEncryptionType(
        vol_enc_types.VolumeEncryptionTypeManager(None), {})
    TEST.cinder_volume_encryption_types.add(vol_enc_type1, vol_enc_type2,
                                            vol_unenc_type1)

    volume_backup1 = vol_backups.VolumeBackup(
        vol_backups.VolumeBackupManager(None),
        {'id': 'a374cbb8-3f99-4c3f-a2ef-3edbec842e31',
         'name': 'backup1',
         'description': 'volume backup 1',
         'size': 10,
         'status': 'available',
         'container_name': 'volumebackups',
         'volume_id': '11023e92-8008-4c8b-8059-7f2293ff3887'})

    volume_backup2 = vol_backups.VolumeBackup(
        vol_backups.VolumeBackupManager(None),
        {'id': 'c321cbb8-3f99-4c3f-a2ef-3edbec842e52',
         'name': 'backup2',
         'description': 'volume backup 2',
         'size': 20,
         'status': 'available',
         'container_name': 'volumebackups',
         'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})

    volume_backup3 = vol_backups.VolumeBackup(
        vol_backups.VolumeBackupManager(None),
        {'id': 'c321cbb8-3f99-4c3f-a2ef-3edbec842e53',
         'name': 'backup3',
         'description': 'volume backup 3',
         'size': 20,
         'status': 'available',
         'container_name': 'volumebackups',
         'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})

    TEST.cinder_volume_backups.add(volume_backup1)
    TEST.cinder_volume_backups.add(volume_backup2)
    TEST.cinder_volume_backups.add(volume_backup3)

    # Volume Encryption
    vol_enc_metadata1 = volumes.Volume(
        volumes.VolumeManager(None),
        {'cipher': 'test-cipher',
         'key_size': 512,
         'provider': 'test-provider',
         'control_location': 'front-end'})
    vol_unenc_metadata1 = volumes.Volume(
        volumes.VolumeManager(None),
        {})
    TEST.cinder_volume_encryption.add(vol_enc_metadata1)
    TEST.cinder_volume_encryption.add(vol_unenc_metadata1)

    # Quota Sets
    # NOTE: quota values are strings here, mirroring the API payload.
    quota_data = dict(volumes='1',
                      snapshots='1',
                      gigabytes='1000')
    quota = quotas.QuotaSet(quotas.QuotaSetManager(None), quota_data)
    TEST.cinder_quotas.add(api.base.QuotaSet(quota))

    # Quota Usages
    quota_usage_data = {'gigabytes': {'used': 0,
                                      'quota': 1000},
                        'instances': {'used': 0,
                                      'quota': 10},
                        'snapshots': {'used': 0,
                                      'quota': 10}}
    quota_usage = usage_quotas.QuotaUsage()
    for k, v in quota_usage_data.items():
        quota_usage.add_quota(api.base.Quota(k, v['quota']))
        quota_usage.tally(k, v['used'])

    TEST.cinder_quota_usages.add(quota_usage)

    # Availability Zones
    # Cinder returns the following structure from os-availability-zone
    # {"availabilityZoneInfo":
    # [{"zoneState": {"available": true}, "zoneName": "nova"}]}
    # Note that the default zone is still "nova" even though this is cinder
    TEST.cinder_availability_zones.add(
        availability_zones.AvailabilityZone(
            availability_zones.AvailabilityZoneManager(None),
            {
                'zoneName': 'nova',
                'zoneState': {'available': True}
            }
        )
    )
    # Cinder Limits
    limits = {"absolute": {"totalVolumesUsed": 1,
                           "totalGigabytesUsed": 5,
                           "maxTotalVolumeGigabytes": 1000,
                           "maxTotalVolumes": 10}}
    TEST.cinder_limits = limits

    # QOS Specs
    qos_spec1 = qos_specs.QoSSpecs(
        qos_specs.QoSSpecsManager(None),
        {"id": "418db45d-6992-4674-b226-80aacad2073c",
         "name": "high_iops",
         "consumer": "back-end",
         "specs": {"minIOPS": "1000",
                   "maxIOPS": '100000'}})
    qos_spec2 = qos_specs.QoSSpecs(
        qos_specs.QoSSpecsManager(None),
        {"id": "6ed7035f-992e-4075-8ed6-6eff19b3192d",
         "name": "high_bws",
         "consumer": "back-end",
         "specs": {"maxBWS": '5000'}})

    TEST.cinder_qos_specs.add(qos_spec1, qos_spec2)
    vol_type1.associated_qos_spec = qos_spec1.name
    TEST.cinder_qos_spec_associations.add(vol_type1)

    # volume_transfers
    transfer_1 = volume_transfers.VolumeTransfer(
        volume_transfers.VolumeTransferManager(None), {
            'id': '99999999-8888-7777-6666-555555555555',
            'name': 'test transfer',
            'volume_id': volume.id,
            'auth_key': 'blah',
            'created_at': ''})
    TEST.cinder_volume_transfers.add(transfer_1)

    # Pools
    pool1 = pools.Pool(
        pools.PoolManager(None), {
            "QoS_support": False,
            "allocated_capacity_gb": 0,
            "driver_version": "3.0.0",
            "free_capacity_gb": 10,
            "extra_specs": {
                "description": "LVM Extra specs",
                "display_name": "LVMDriver",
                "namespace": "OS::Cinder::LVMDriver",
                "type": "object",
            },
            "name": "devstack@lvmdriver-1#lvmdriver-1",
            "pool_name": "lvmdriver-1",
            "reserved_percentage": 0,
            "storage_protocol": "iSCSI",
            "total_capacity_gb": 10,
            "vendor_name": "Open Source",
            "volume_backend_name": "lvmdriver-1"})

    pool2 = pools.Pool(
        pools.PoolManager(None), {
            "QoS_support": False,
            "allocated_capacity_gb": 2,
            "driver_version": "3.0.0",
            "free_capacity_gb": 15,
            "extra_specs": {
                "description": "LVM Extra specs",
                "display_name": "LVMDriver",
                "namespace": "OS::Cinder::LVMDriver",
                "type": "object",
            },
            "name": "devstack@lvmdriver-2#lvmdriver-2",
            "pool_name": "lvmdriver-2",
            "reserved_percentage": 0,
            "storage_protocol": "iSCSI",
            "total_capacity_gb": 10,
            "vendor_name": "Open Source",
            "volume_backend_name": "lvmdriver-2"})

    TEST.cinder_pools.add(pool1)
    TEST.cinder_pools.add(pool2)

    # volume consistency groups
    cgroup_1 = consistencygroups.Consistencygroup(
        consistencygroups.ConsistencygroupManager(None),
        {'id': u'1',
         'name': u'cg_1',
         'description': 'cg 1 description',
         'volume_types': u'1',
         'volume_type_names': []})
    cgroup_2 = consistencygroups.Consistencygroup(
        consistencygroups.ConsistencygroupManager(None),
        {'id': u'2',
         'name': u'cg_2',
         'description': 'cg 2 description',
         'volume_types': u'1',
         'volume_type_names': []})

    TEST.cinder_consistencygroups.add(cgroup_1)
    TEST.cinder_consistencygroups.add(cgroup_2)

    volume_for_consistency_group = volumes.Volume(
        volumes.VolumeManager(None),
        {'id': "11023e92-8008-4c8b-8059-7f2293ff3881",
         'status': 'available',
         'size': 40,
         'display_name': 'Volume name',
         'display_description': 'Volume description',
         'created_at': '2014-01-27 10:30:00',
         'volume_type': None,
         'attachments': [],
         'consistencygroup_id': u'1'})
    TEST.cinder_cgroup_volumes.add(api.cinder.Volume(
        volume_for_consistency_group))
apache-2.0
PHILO-HE/SSM
smart-hadoop-support/smart-hadoop/src/main/java/org/smartdata/hdfs/action/AllDiskFileAction.java
1192
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.smartdata.hdfs.action; import org.smartdata.action.annotation.ActionSignature; /** * An action to do all-disk for a file. */ @ActionSignature( actionId = "alldisk", displayName = "alldisk", usage = HdfsAction.FILE_PATH + " $file " ) public class AllDiskFileAction extends MoveFileAction { @Override public String getStoragePolicy() { return "HOT"; } }
apache-2.0
kay-kim/mongo-java-driver
driver-core/src/main/com/mongodb/connection/SocketStreamHelper.java
2178
/* * Copyright (c) 2008-2014 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mongodb.connection; import com.mongodb.MongoInternalException; import com.mongodb.ServerAddress; import javax.net.ssl.SSLSocket; import java.io.IOException; import java.net.Socket; import static com.mongodb.internal.connection.SslHelper.enableHostNameVerification; import static java.util.concurrent.TimeUnit.MILLISECONDS; final class SocketStreamHelper { static void initialize(final Socket socket, final ServerAddress address, final SocketSettings settings, final SslSettings sslSettings) throws IOException { socket.setTcpNoDelay(true); socket.setSoTimeout(settings.getReadTimeout(MILLISECONDS)); socket.setKeepAlive(settings.isKeepAlive()); if (settings.getReceiveBufferSize() > 0) { socket.setReceiveBufferSize(settings.getReceiveBufferSize()); } if (settings.getSendBufferSize() > 0) { socket.setSendBufferSize(settings.getSendBufferSize()); } if (sslSettings.isEnabled()) { if (!(socket instanceof SSLSocket)) { throw new MongoInternalException("SSL is enabled but the socket is not an instance of javax.net.ssl.SSLSocket"); } if (!sslSettings.isInvalidHostNameAllowed()) { SSLSocket sslSocket = (SSLSocket) socket; sslSocket.setSSLParameters(enableHostNameVerification(sslSocket.getSSLParameters())); } } socket.connect(address.getSocketAddress(), settings.getConnectTimeout(MILLISECONDS)); } private SocketStreamHelper() { } }
apache-2.0
hkernbach/arangodb
Documentation/DocuBlocks/Rest/Cluster/JSF_cluster_test_DELETE.md
207
@startDocuBlock JSF_cluster_test_DELETE
@brief executes a cluster roundtrip for sharding

@RESTHEADER{DELETE /_admin/cluster-test, Delete cluster roundtrip}

@RESTDESCRIPTION
See the documentation of the GET method of this endpoint for details.
@endDocuBlock
apache-2.0
ismagarcia/cohash
tmp/cohash-read-only/include/thrust/iterator/detail/minimum_category.h
2681
/* * Copyright 2008-2010 NVIDIA Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <thrust/detail/type_traits.h> namespace thrust { namespace detail { // forward references to lambda placeholders defined in zip_iterator.inl struct _1; struct _2; // // Returns the minimum category type or error_type // if T1 and T2 are unrelated. // template <bool GreaterEqual, bool LessEqual> struct minimum_category_impl //# if BOOST_WORKAROUND(BOOST_MSVC, < 1300) //{ // template <class T1, class T2> struct apply // { // typedef T2 type; // }; // typedef void type; //} //# endif ; template <class T1, class T2> struct error_not_related_by_convertibility; template <> struct minimum_category_impl<true,false> { template <class T1, class T2> struct apply { typedef T2 type; }; // end apply }; // end minimum_category_impl template <> struct minimum_category_impl<false,true> { template <class T1, class T2> struct apply { typedef T1 type; }; // end apply }; // end minimum_category_impl template <> struct minimum_category_impl<true,true> { template <class T1, class T2> struct apply { //BOOST_STATIC_ASSERT((is_same<T1,T2>::value)); typedef T1 type; }; // end apply }; // end minimum_category_impl template <> struct minimum_category_impl<false,false> { template <class T1, class T2> struct apply : error_not_related_by_convertibility<T1,T2> { }; // end apply }; // end minimum_category_impl template <class T1 = _1, class T2 = _2> struct minimum_category { typedef 
minimum_category_impl< ::thrust::detail::is_convertible<T1,T2>::value , ::thrust::detail::is_convertible<T2,T1>::value > outer; typedef typename outer::template apply<T1,T2> inner; typedef typename inner::type type; //BOOST_MPL_AUX_LAMBDA_SUPPORT(2,minimum_category,(T1,T2)) }; // end minimum_category template <> struct minimum_category<_1,_2> { template <class T1, class T2> struct apply : minimum_category<T1,T2> {}; //BOOST_MPL_AUX_LAMBDA_SUPPORT_SPEC(2,minimum_category,(_1,_2)) }; // end minimum_category } // end detail } // end thrust
apache-2.0
ACRA/acra
web/static/javadoc/5.8.4/acra/org.acra.config/-mail-sender-configuration-builder/enabled.html
3378
<html> <head> <meta name="viewport" content="width=device-width, initial-scale=1" charset="UTF-8"> <title>enabled</title> <link href="../../../../../../images/logo-icon.svg" rel="icon" type="image/svg"><script>var pathToRoot = "../../../";</script><script type="text/javascript" src="../../../scripts/sourceset_dependencies.js" async="async"></script><link href="../../../styles/style.css" rel="Stylesheet"><link href="../../../styles/logo-styles.css" rel="Stylesheet"><link href="../../../styles/jetbrains-mono.css" rel="Stylesheet"><link href="../../../styles/main.css" rel="Stylesheet"><script type="text/javascript" src="../../../scripts/clipboard.js" async="async"></script><script type="text/javascript" src="../../../scripts/navigation-loader.js" async="async"></script><script type="text/javascript" src="../../../scripts/platform-content-handler.js" async="async"></script><script type="text/javascript" src="../../../scripts/main.js" async="async"></script> </head> <body> <div id="container"> <div id="leftColumn"><a href="../../../index.html"> <div id="logo"></div> </a> <div id="paneSearch"></div> <div id="sideMenu"></div> </div> <div id="main"> <div id="leftToggler"><span class="icon-toggler"></span></div> <script type="text/javascript" src="../../../scripts/main.js"></script> <div class="main-content" id="content" pageIds="acra::org.acra.config/MailSenderConfigurationBuilder/enabled/#/PointingToDeclaration//-535716451"> <div class="navigation-wrapper" id="navigation-wrapper"> <div class="breadcrumbs"><a href="../../../index.html">acra</a>/<a href="../index.html">org.acra.config</a>/<a href="index.html">MailSenderConfigurationBuilder</a>/<a href="enabled.html">enabled</a></div> <div class="pull-right d-flex"> <div class="filter-section" id="filter-section"><button class="platform-tag platform-selector jvm-like" data-active="" data-filter=":acra-toast:dokkaHtml/release">androidJvm</button></div> <div id="searchBar"></div> </div> </div> <div class="cover "> <h1 
class="cover"><span>enabled</span></h1> </div> <div class="divergent-group" data-filterable-current=":acra-toast:dokkaHtml/release" data-filterable-set=":acra-toast:dokkaHtml/release"><div class="with-platform-tags"><span class="pull-right"></span></div> <div> <div class="platform-hinted " data-platform-hinted="data-platform-hinted"><div class="content sourceset-depenent-content" data-active="" data-togglable=":acra-toast:dokkaHtml/release"><div class="symbol monospace">var <a href="enabled.html">enabled</a>: <a href="https://kotlinlang.org/api/latest/jvm/stdlib/kotlin/-boolean/index.html">Boolean</a><span class="top-right-position"><span class="copy-icon"></span><div class="copy-popup-wrapper popup-to-left"><span class="copy-popup-icon"></span><span>Content copied to clipboard</span></div></span></div></div></div> </div> </div> </div> <div class="footer"><span class="go-to-top-icon"><a href="#content"></a></span><span>© 2021 Copyright</span><span class="pull-right"><span>Generated by </span><a href="https://github.com/Kotlin/dokka"><span>dokka</span><span class="padded-icon"></span></a></span></div> </div> </div> </body> </html>
apache-2.0
wangcy6/storm_app
frame/c++/webrtc-master/modules/video_coding/codecs/vp8/simulcast_rate_allocator.h
1734
/*
 *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_RATE_ALLOCATOR_H_
#define MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_RATE_ALLOCATOR_H_

#include <stdint.h>

#include <map>
#include <memory>

#include "api/video_codecs/video_encoder.h"
#include "common_video/include/video_bitrate_allocator.h"
#include "modules/video_coding/codecs/vp8/temporal_layers.h"
#include "rtc_base/constructormagic.h"

namespace webrtc {

// Bitrate allocator that distributes a total bitrate across the simulcast
// streams (and their temporal layers) of a single codec instance.
// Implements VideoBitrateAllocator for the allocation queries and
// TemporalLayersListener to be told about TemporalLayers instances as they
// are created.  Non-copyable.
class SimulcastRateAllocator : public VideoBitrateAllocator,
                               public TemporalLayersListener {
 public:
  // Takes a snapshot of |codec| and ownership of |tl_factory|.
  explicit SimulcastRateAllocator(
      const VideoCodec& codec,
      std::unique_ptr<TemporalLayersFactory> tl_factory);

  // Registers the TemporalLayers created for the given simulcast stream.
  // NOTE(review): |layers| is stored as a raw pointer in temporal_layers_,
  // so it is presumably owned elsewhere and must outlive this allocator —
  // confirm against the factory/encoder lifetime.
  void OnTemporalLayersCreated(int simulcast_id,
                               TemporalLayers* layers) override;

  // Splits |total_bitrate_bps| at |framerate| into a per-stream/per-layer
  // BitrateAllocation.
  BitrateAllocation GetAllocation(uint32_t total_bitrate_bps,
                                  uint32_t framerate) override;

  uint32_t GetPreferredBitrateBps(uint32_t framerate) override;

  // Returns the codec configuration captured at construction time.
  const VideoCodec& GetCodec() const;

 private:
  const VideoCodec codec_;
  // Keyed by simulcast stream id (see OnTemporalLayersCreated); values are
  // not owned.
  std::map<uint32_t, TemporalLayers*> temporal_layers_;
  std::unique_ptr<TemporalLayersFactory> tl_factory_;

  RTC_DISALLOW_COPY_AND_ASSIGN(SimulcastRateAllocator);
};

}  // namespace webrtc

#endif  // MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_RATE_ALLOCATOR_H_
apache-2.0
edgardozoppi/GraphX
GraphX.Controls/Controls/VertexLabels/AttachableVertexLabelControl.cs
5630
using System.ComponentModel;
using GraphX.Controls.Models;
#if WPF
using System.Windows;
using DefaultEventArgs = System.EventArgs;
using System.Windows.Controls;
#elif METRO
using Windows.Foundation;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Data;
using DefaultEventArgs = System.Object;
#endif
using GraphX.PCL.Common.Exceptions;

namespace GraphX.Controls
{
#if METRO
    [Bindable]
#endif
    /// <summary>
    /// Vertex label control that can be attached to / detached from a
    /// <see cref="VertexControl"/> at runtime.  Positions itself relative to
    /// the attached vertex and (on WPF) follows the vertex's visibility.
    /// Dual-platform: behavior differs between WPF and METRO branches.
    /// </summary>
    public class AttachableVertexLabelControl : VertexLabelControl, IAttachableControl<VertexControl>, INotifyPropertyChanged
    {
        /// <summary>
        /// Gets label attach node
        /// </summary>
        public VertexControl AttachNode
        {
            get { return (VertexControl)GetValue(AttachNodeProperty); }
            private set { SetValue(AttachNodeProperty, value); OnPropertyChanged("AttachNode"); }
        }

        // Backing dependency property for AttachNode; defaults to null (detached).
        public static readonly DependencyProperty AttachNodeProperty =
            DependencyProperty.Register(nameof(AttachNode), typeof(VertexControl), typeof(AttachableVertexLabelControl),
                                        new PropertyMetadata(null));

#if WPF
        static AttachableVertexLabelControl()
        {
            // Use this type's default style instead of the base class's.
            DefaultStyleKeyProperty.OverrideMetadata(typeof(AttachableVertexLabelControl),
                new FrameworkPropertyMetadata(typeof(AttachableVertexLabelControl)));
        }
#endif

        public AttachableVertexLabelControl()
        {
            // Self as DataContext so the template can bind directly to AttachNode etc.
            DataContext = this;
#if METRO
            DefaultStyleKey = typeof(AttachableVertexLabelControl);
#endif
        }

        /// <summary>
        /// Attach label to VertexControl
        /// </summary>
        /// <param name="node">VertexControl node</param>
        public virtual void Attach(VertexControl node)
        {
#if WPF
            // Unhook from a previously attached node before re-attaching, so
            // visibility events are never received from a stale vertex.
            if (AttachNode != null)
                AttachNode.IsVisibleChanged -= AttachNode_IsVisibleChanged;
            AttachNode = node;

            AttachNode.IsVisibleChanged += AttachNode_IsVisibleChanged;
#elif METRO
            AttachNode = node;
#endif
            node.AttachLabel(this);
        }

        /// <summary>
        /// Detach label from control
        /// </summary>
        public virtual void Detach()
        {
#if WPF
            if (AttachNode != null)
                AttachNode.IsVisibleChanged -= AttachNode_IsVisibleChanged;
#endif
            AttachNode = null;
        }

#if WPF
        // Mirrors the attached vertex's visibility: show only when the vertex
        // is visible and wants its label shown; hide when it becomes invisible.
        void AttachNode_IsVisibleChanged(object sender, DependencyPropertyChangedEventArgs e)
        {
            if (AttachNode.IsVisible && AttachNode.ShowLabel)
                Show();
            else if (!AttachNode.IsVisible)
            {
                Hide();
            }
        }
#endif

        protected override VertexControl GetVertexControl(DependencyObject parent)
        {
            // The attached node is authoritative; the visual-tree parent is ignored.
            //if(AttachNode == null)
            //    throw new GX_InvalidDataException("AttachableVertexLabelControl node is not attached!");
            return AttachNode;
        }

        public override void UpdatePosition()
        {
            // Bail out until the label has been measured.
            if (double.IsNaN(DesiredSize.Width) || DesiredSize.Width == 0) return;

            var vc = GetVertexControl(GetParent());
            if (vc == null) return;

            if (LabelPositionMode == VertexLabelPositionMode.Sides)
            {
                // Place the label on the configured side of the vertex,
                // offset by the label's own desired size.
                var vcPos = vc.GetPosition();
                Point pt;
                switch (LabelPositionSide)
                {
                    case VertexLabelPositionSide.TopRight:
                        pt = new Point(vcPos.X + vc.DesiredSize.Width, vcPos.Y + -DesiredSize.Height);
                        break;
                    case VertexLabelPositionSide.BottomRight:
                        pt = new Point(vcPos.X + vc.DesiredSize.Width, vcPos.Y + vc.DesiredSize.Height);
                        break;
                    case VertexLabelPositionSide.TopLeft:
                        pt = new Point(vcPos.X + -DesiredSize.Width, vcPos.Y + -DesiredSize.Height);
                        break;
                    case VertexLabelPositionSide.BottomLeft:
                        pt = new Point(vcPos.X + -DesiredSize.Width, vcPos.Y + vc.DesiredSize.Height);
                        break;
                    case VertexLabelPositionSide.Top:
                        pt = new Point(vcPos.X + vc.DesiredSize.Width * .5 - DesiredSize.Width * .5, vcPos.Y + -DesiredSize.Height);
                        break;
                    case VertexLabelPositionSide.Bottom:
                        pt = new Point(vcPos.X + vc.DesiredSize.Width * .5 - DesiredSize.Width * .5, vcPos.Y + vc.DesiredSize.Height);
                        break;
                    case VertexLabelPositionSide.Left:
                        pt = new Point(vcPos.X + -DesiredSize.Width, vcPos.Y + vc.DesiredSize.Height * .5f - DesiredSize.Height * .5);
                        break;
                    case VertexLabelPositionSide.Right:
                        pt = new Point(vcPos.X + vc.DesiredSize.Width, vcPos.Y + vc.DesiredSize.Height * .5f - DesiredSize.Height * .5);
                        break;
                    default:
                        throw new GX_InvalidDataException("UpdatePosition() -> Unknown vertex label side!");
                }
                LastKnownRectSize = new Rect(pt, DesiredSize);
            }
            else LastKnownRectSize = new Rect(LabelPosition, DesiredSize);

            Arrange(LastKnownRectSize);
        }

        public event PropertyChangedEventHandler PropertyChanged;

        protected virtual void OnPropertyChanged(string propertyName)
        {
            var handler = PropertyChanged;
            handler?.Invoke(this, new PropertyChangedEventArgs(propertyName));
        }
    }
}
apache-2.0
falko/camunda-bpm-platform
engine-rest/engine-rest-jaxrs2/src/test/java-jersey2/org/camunda/bpm/engine/rest/util/container/JerseySpecifics.java
2193
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.rest.util.container;

import org.junit.rules.ExternalResource;
import org.junit.rules.TestRule;

import javax.ws.rs.core.Application;

import java.util.HashMap;
import java.util.Map;

/**
 * Jersey-specific container setup for REST tests: supplies a JUnit
 * {@link TestRule} that boots an embedded Jersey server around each test.
 * Individual test classes may register a dedicated factory in
 * {@link #TEST_RULE_FACTORIES}; all others fall back to
 * {@link #DEFAULT_RULE_FACTORY}.
 */
public class JerseySpecifics implements ContainerSpecifics {

  /** Factory used for any test class without a dedicated entry. */
  protected static final TestRuleFactory DEFAULT_RULE_FACTORY =
      new EmbeddedServerRuleFactory(new JaxrsApplication());

  /** Per-test-class overrides; empty unless populated elsewhere. */
  protected static final Map<Class<?>, TestRuleFactory> TEST_RULE_FACTORIES = new HashMap<>();

  /**
   * Returns the test rule for the given test class, preferring a registered
   * class-specific factory over the default one.
   */
  public TestRule getTestRule(Class<?> testClass) {
    // getOrDefault replaces the original containsKey/get pair.
    TestRuleFactory ruleFactory = TEST_RULE_FACTORIES.getOrDefault(testClass, DEFAULT_RULE_FACTORY);
    return ruleFactory.createTestRule();
  }

  /** Creates rules that start/stop an embedded Jersey server per test. */
  public static class EmbeddedServerRuleFactory implements TestRuleFactory {

    protected Application jaxRsApplication;

    public EmbeddedServerRuleFactory(Application jaxRsApplication) {
      this.jaxRsApplication = jaxRsApplication;
    }

    public TestRule createTestRule() {
      return new ExternalResource() {

        JerseyServerBootstrap bootstrap = new JerseyServerBootstrap(jaxRsApplication);

        @Override
        protected void before() throws Throwable {
          bootstrap.start();
        }

        @Override
        protected void after() {
          bootstrap.stop();
        }
      };
    }
  }
}
apache-2.0
hsbhathiya/stratos
extensions/load-balancer/nginx-extension/src/main/java/org/apache/stratos/nginx/extension/NginxConfigWriter.java
7122
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.stratos.nginx.extension;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.load.balancer.common.domain.*;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.RuntimeConstants;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Collection;

/**
 * Nginx load balancer configuration writer.
 *
 * Renders an nginx configuration from the load-balancer topology via a
 * Velocity template and writes it to {@code confFilePath}.
 */
public class NginxConfigWriter {

    // FIX: logger was registered under Main.class; use this class so log
    // output is attributed to the writer.
    private static final Log log = LogFactory.getLog(NginxConfigWriter.class);
    private static final String NEW_LINE = System.getProperty("line.separator");
    private static final String TAB = "    ";

    private String templatePath;
    private String templateName;
    private String confFilePath;
    // Retained for constructor compatibility; not used by this writer.
    private String statsSocketFilePath;

    public NginxConfigWriter(String templatePath, String templateName, String confFilePath,
                             String statsSocketFilePath) {
        this.templatePath = templatePath;
        this.templateName = templateName;
        this.confFilePath = confFilePath;
        this.statsSocketFilePath = statsSocketFilePath;
    }

    /**
     * Generates the nginx configuration for the given topology and writes it
     * to the configuration file.
     *
     * @param topology current load-balancer topology
     * @return true on success
     * @throws RuntimeException if a service has no ports or the file cannot
     *         be written (the underlying IOException is preserved as cause)
     */
    public boolean write(Topology topology) {

        StringBuilder configurationBuilder = new StringBuilder();

        for (Service service : topology.getServices()) {
            for (Cluster cluster : service.getClusters()) {
                if ((service.getPorts() == null) || (service.getPorts().size() == 0)) {
                    throw new RuntimeException(String.format("No ports found in service: %s",
                            service.getServiceName()));
                }
                generateConfigurationForCluster(cluster, service.getPorts(), configurationBuilder);
            }
        }

        // Start velocity engine
        VelocityEngine ve = new VelocityEngine();
        ve.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_PATH, templatePath);
        ve.init();

        // Open the template
        Template t = ve.getTemplate(templateName);

        // Insert strings into the template
        VelocityContext context = new VelocityContext();
        context.put("configuration", configurationBuilder.toString());

        // Create a new string from the template
        StringWriter stringWriter = new StringWriter();
        t.merge(context, stringWriter);
        String configuration = stringWriter.toString();

        // Write configuration file.
        // FIX: try-with-resources ensures the writer is closed even when
        // write() throws; the original leaked the stream on failure.
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(confFilePath))) {
            writer.write(configuration);
        } catch (IOException e) {
            if (log.isErrorEnabled()) {
                log.error(String.format("Could not write configuration file: %s", confFilePath));
            }
            throw new RuntimeException(e);
        }

        if (log.isInfoEnabled()) {
            log.info(String.format("Configuration written to file: %s", confFilePath));
        }
        return true;
    }

    /**
     * Generate configuration for a cluster with the following format:
     *
     * <transport> {
     *     upstream <cluster-hostname> {
     *         server <hostname>:<port>;
     *         server <hostname>:<port>;
     *     }
     *     server {
     *         listen <proxy-port>;
     *         server_name <cluster-hostname>;
     *         location / {
     *             proxy_pass http://<cluster-hostname>
     *         }
     *         location /nginx_status {
     *             stub_status on;
     *             access_log off;
     *             allow 127.0.0.1;
     *             deny all;
     *         }
     *     }
     * }
     * @param cluster
     * @param ports
     * @param text
     */
    private void generateConfigurationForCluster(Cluster cluster, Collection<Port> ports, StringBuilder text) {

        for (Port port : ports) {
            for (String hostname : cluster.getHostNames()) {
                // Start transport block
                text.append(port.getProtocol()).append(" {").append(NEW_LINE);

                // Start upstream block
                text.append(TAB).append("upstream ").append(hostname).append(" {").append(NEW_LINE);
                for (Member member : cluster.getMembers()) {
                    // Start upstream server block
                    text.append(TAB).append(TAB).append("server ").append(member.getHostName()).append(":")
                            .append(port.getValue()).append(";").append(NEW_LINE);
                    // End upstream server block
                }
                text.append(TAB).append("}").append(NEW_LINE);
                // End upstream block

                // Start server block
                text.append(NEW_LINE);
                text.append(TAB).append("server {").append(NEW_LINE);
                text.append(TAB).append(TAB).append("listen ").append(port.getProxy()).append(";").append(NEW_LINE);
                text.append(TAB).append(TAB).append("server_name ").append(hostname).append(";").append(NEW_LINE);
                text.append(TAB).append(TAB).append("location / {").append(NEW_LINE);
                text.append(TAB).append(TAB).append(TAB).append("proxy_pass").append(TAB)
                        .append("http://").append(hostname).append(";").append(NEW_LINE);
                text.append(TAB).append(TAB).append("}").append(NEW_LINE);
                text.append(TAB).append(TAB).append("location /nginx_status {").append(NEW_LINE);
                text.append(TAB).append(TAB).append(TAB).append("stub_status on;").append(NEW_LINE);
                text.append(TAB).append(TAB).append(TAB).append("access_log off;").append(NEW_LINE);
                text.append(TAB).append(TAB).append(TAB).append("allow 127.0.0.1;").append(NEW_LINE);
                text.append(TAB).append(TAB).append(TAB).append("deny all;").append(NEW_LINE);
                text.append(TAB).append(TAB).append("}").append(NEW_LINE);
                text.append(TAB).append("}").append(NEW_LINE);
                // End server block

                text.append("}").append(NEW_LINE);
                // End transport block
            }
        }
    }
}
apache-2.0
llvasconcellos/client
app/src/main/java/org/projectbuendia/client/json/JsonConceptResponse.java
762
// Copyright 2015 The Project Buendia Authors // // Licensed under the Apache License, Version 2.0 (the "License"); you may not // use this file except in compliance with the License. You may obtain a copy // of the License at: http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distrib- // uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES // OR CONDITIONS OF ANY KIND, either express or implied. See the License for // specific language governing permissions and limitations under the License. package org.projectbuendia.client.json; /** A list of concept results returned by the server. */ public class JsonConceptResponse { public JsonConcept[] results; }
apache-2.0
plxaye/chromium
src/remoting/protocol/authentication_method.cc
3345
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "remoting/protocol/authentication_method.h" #include "base/base64.h" #include "base/logging.h" #include "crypto/hmac.h" #include "remoting/protocol/auth_util.h" namespace remoting { namespace protocol { // static AuthenticationMethod AuthenticationMethod::Invalid() { return AuthenticationMethod(); } // static AuthenticationMethod AuthenticationMethod::Spake2(HashFunction hash_function) { return AuthenticationMethod(SPAKE2, hash_function); } // static AuthenticationMethod AuthenticationMethod::ThirdParty() { return AuthenticationMethod(THIRD_PARTY, NONE); } // static AuthenticationMethod AuthenticationMethod::FromString( const std::string& value) { if (value == "spake2_plain") { return Spake2(NONE); } else if (value == "spake2_hmac") { return Spake2(HMAC_SHA256); } else if (value == "third_party") { return ThirdParty(); } else { return AuthenticationMethod::Invalid(); } } // static std::string AuthenticationMethod::ApplyHashFunction( HashFunction hash_function, const std::string& tag, const std::string& shared_secret) { switch (hash_function) { case NONE: return shared_secret; break; case HMAC_SHA256: { crypto::HMAC response(crypto::HMAC::SHA256); if (!response.Init(tag)) { LOG(FATAL) << "HMAC::Init failed"; } unsigned char out_bytes[kSharedSecretHashLength]; if (!response.Sign(shared_secret, out_bytes, sizeof(out_bytes))) { LOG(FATAL) << "HMAC::Sign failed"; } return std::string(out_bytes, out_bytes + sizeof(out_bytes)); } } NOTREACHED(); return shared_secret; } AuthenticationMethod::AuthenticationMethod() : type_(INVALID), hash_function_(NONE) { } AuthenticationMethod::AuthenticationMethod(MethodType type, HashFunction hash_function) : type_(type), hash_function_(hash_function) { DCHECK_NE(type_, INVALID); } AuthenticationMethod::HashFunction AuthenticationMethod::hash_function() const { 
DCHECK(is_valid()); return hash_function_; } const std::string AuthenticationMethod::ToString() const { DCHECK(is_valid()); if (type_ == THIRD_PARTY) return "third_party"; DCHECK_EQ(type_, SPAKE2); switch (hash_function_) { case NONE: return "spake2_plain"; case HMAC_SHA256: return "spake2_hmac"; } return "invalid"; } bool AuthenticationMethod::operator ==( const AuthenticationMethod& other) const { return type_ == other.type_ && hash_function_ == other.hash_function_; } bool SharedSecretHash::Parse(const std::string& as_string) { size_t separator = as_string.find(':'); if (separator == std::string::npos) return false; std::string function_name = as_string.substr(0, separator); if (function_name == "plain") { hash_function = AuthenticationMethod::NONE; } else if (function_name == "hmac") { hash_function = AuthenticationMethod::HMAC_SHA256; } else { return false; } if (!base::Base64Decode(as_string.substr(separator + 1), &value)) { return false; } return true; } } // namespace protocol } // namespace remoting
apache-2.0
dougwig/acos-client
acos_client/v21/slb/template/persistence.py
2099
# Copyright 2014, Doug Wiegley, A10 Networks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import acos_client.errors as acos_errors
import acos_client.v21.base as base


class BasePersistence(base.BaseV21):
    """Shared CRUD operations for SLB persistence templates.

    Subclasses must assign ``pers_type`` before delegating to this
    initializer and must implement ``get_params`` to build the create
    payload for their template flavor.
    """

    def __init__(self, client):
        super(BasePersistence, self).__init__(client)
        # e.g. "slb.template.cookie_persistence" for pers_type == 'cookie'.
        self.prefix = "slb.template.%s_persistence" % self.pers_type

    def get(self, name, **kwargs):
        """Look up the template called ``name`` via the search endpoint."""
        return self._post("%s.search" % self.prefix, {'name': name}, **kwargs)

    def exists(self, name, **kwargs):
        """Return True when the template exists, False on NotFound."""
        try:
            self.get(name, **kwargs)
        except acos_errors.NotFound:
            return False
        return True

    def create(self, name, **kwargs):
        """Create a template named ``name`` from subclass-built params."""
        self._post("%s.create" % self.prefix, self.get_params(name), **kwargs)

    def delete(self, name, **kwargs):
        """Delete the template called ``name``."""
        self._post("%s.delete" % self.prefix, {'name': name}, **kwargs)


class CookiePersistence(BasePersistence):
    """Cookie-based persistence template operations."""

    def __init__(self, client):
        self.pers_type = 'cookie'
        super(CookiePersistence, self).__init__(client)

    def get_params(self, name):
        # Payload shape expected by the cookie-template create call.
        return {"cookie_persistence_template": {"name": name}}


class SourceIpPersistence(BasePersistence):
    """Source-IP-based persistence template operations."""

    def __init__(self, client):
        self.pers_type = 'src_ip'
        super(SourceIpPersistence, self).__init__(client)

    def get_params(self, name):
        # Payload shape expected by the src_ip-template create call.
        return {"src_ip_persistence_template": {"name": name}}
apache-2.0
GertiPoppel/kubernetes
pkg/client/unversioned/testclient/fake_resource_quotas.go
2914
/* Copyright 2014 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package testclient import ( "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/api/unversioned" "k8s.io/kubernetes/pkg/fields" "k8s.io/kubernetes/pkg/labels" "k8s.io/kubernetes/pkg/watch" ) // FakeResourceQuotas implements ResourceQuotaInterface. Meant to be embedded into a struct to get a default // implementation. This makes faking out just the methods you want to test easier. type FakeResourceQuotas struct { Fake *Fake Namespace string } func (c *FakeResourceQuotas) Get(name string) (*api.ResourceQuota, error) { obj, err := c.Fake.Invokes(NewGetAction("resourcequotas", c.Namespace, name), &api.ResourceQuota{}) if obj == nil { return nil, err } return obj.(*api.ResourceQuota), err } func (c *FakeResourceQuotas) List(label labels.Selector, field fields.Selector) (*api.ResourceQuotaList, error) { obj, err := c.Fake.Invokes(NewListAction("resourcequotas", c.Namespace, label, field), &api.ResourceQuotaList{}) if obj == nil { return nil, err } return obj.(*api.ResourceQuotaList), err } func (c *FakeResourceQuotas) Create(resourceQuota *api.ResourceQuota) (*api.ResourceQuota, error) { obj, err := c.Fake.Invokes(NewCreateAction("resourcequotas", c.Namespace, resourceQuota), resourceQuota) if obj == nil { return nil, err } return obj.(*api.ResourceQuota), err } func (c *FakeResourceQuotas) Update(resourceQuota *api.ResourceQuota) (*api.ResourceQuota, error) { obj, err := 
c.Fake.Invokes(NewUpdateAction("resourcequotas", c.Namespace, resourceQuota), resourceQuota) if obj == nil { return nil, err } return obj.(*api.ResourceQuota), err } func (c *FakeResourceQuotas) Delete(name string) error { _, err := c.Fake.Invokes(NewDeleteAction("resourcequotas", c.Namespace, name), &api.ResourceQuota{}) return err } func (c *FakeResourceQuotas) Watch(label labels.Selector, field fields.Selector, opts unversioned.ListOptions) (watch.Interface, error) { return c.Fake.InvokesWatch(NewWatchAction("resourcequotas", c.Namespace, label, field, opts)) } func (c *FakeResourceQuotas) UpdateStatus(resourceQuota *api.ResourceQuota) (*api.ResourceQuota, error) { obj, err := c.Fake.Invokes(NewUpdateSubresourceAction("resourcequotas", "status", c.Namespace, resourceQuota), resourceQuota) if obj == nil { return nil, err } return obj.(*api.ResourceQuota), err }
apache-2.0
indi60/hbase-pmc
target/hbase-0.94.1/hbase-0.94.1/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
7362
/* * Copyright 2010 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.replication.regionserver; import java.io.IOException; import java.util.NavigableMap; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.regionserver.ReplicationSourceService; import org.apache.hadoop.hbase.regionserver.ReplicationSinkService; import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.regionserver.wal.HLogKey; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.replication.ReplicationZookeeper; import org.apache.hadoop.hbase.replication.master.ReplicationLogCleaner; import org.apache.hadoop.hbase.util.Bytes; import org.apache.zookeeper.KeeperException; import static org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS; 
import static org.apache.hadoop.hbase.HConstants.REPLICATION_ENABLE_KEY; import static org.apache.hadoop.hbase.HConstants.REPLICATION_SCOPE_LOCAL; /** * Gateway to Replication. Used by {@link org.apache.hadoop.hbase.regionserver.HRegionServer}. */ public class Replication implements WALActionsListener, ReplicationSourceService, ReplicationSinkService { private boolean replication; private ReplicationSourceManager replicationManager; private final AtomicBoolean replicating = new AtomicBoolean(true); private ReplicationZookeeper zkHelper; private Configuration conf; private ReplicationSink replicationSink; // Hosting server private Server server; /** * Instantiate the replication management (if rep is enabled). * @param server Hosting server * @param fs handle to the filesystem * @param logDir * @param oldLogDir directory where logs are archived * @throws IOException */ public Replication(final Server server, final FileSystem fs, final Path logDir, final Path oldLogDir) throws IOException{ initialize(server, fs, logDir, oldLogDir); } /** * Empty constructor */ public Replication() { } public void initialize(final Server server, final FileSystem fs, final Path logDir, final Path oldLogDir) throws IOException { this.server = server; this.conf = this.server.getConfiguration(); this.replication = isReplication(this.conf); if (replication) { try { this.zkHelper = new ReplicationZookeeper(server, this.replicating); } catch (KeeperException ke) { throw new IOException("Failed replication handler create " + "(replicating=" + this.replicating, ke); } this.replicationManager = new ReplicationSourceManager(zkHelper, conf, this.server, fs, this.replicating, logDir, oldLogDir) ; } else { this.replicationManager = null; this.zkHelper = null; } } /** * @param c Configuration to look at * @return True if replication is enabled. 
*/ public static boolean isReplication(final Configuration c) { return c.getBoolean(REPLICATION_ENABLE_KEY, false); } /* * Returns an object to listen to new hlog changes **/ public WALActionsListener getWALActionsListener() { return this; } /** * Stops replication service. */ public void stopReplicationService() { join(); } /** * Join with the replication threads */ public void join() { if (this.replication) { this.replicationManager.join(); } } /** * Carry on the list of log entries down to the sink * @param entries list of entries to replicate * @throws IOException */ public void replicateLogEntries(HLog.Entry[] entries) throws IOException { if (this.replication) { this.replicationSink.replicateEntries(entries); } } /** * If replication is enabled and this cluster is a master, * it starts * @throws IOException */ public void startReplicationService() throws IOException { if (this.replication) { this.replicationManager.init(); this.replicationSink = new ReplicationSink(this.conf, this.server); } } /** * Get the replication sources manager * @return the manager if replication is enabled, else returns false */ public ReplicationSourceManager getReplicationManager() { return this.replicationManager; } @Override public void visitLogEntryBeforeWrite(HRegionInfo info, HLogKey logKey, WALEdit logEdit) { // Not interested } @Override public void visitLogEntryBeforeWrite(HTableDescriptor htd, HLogKey logKey, WALEdit logEdit) { NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR); byte[] family; for (KeyValue kv : logEdit.getKeyValues()) { family = kv.getFamily(); int scope = htd.getFamily(family).getScope(); if (scope != REPLICATION_SCOPE_LOCAL && !scopes.containsKey(family)) { scopes.put(family, scope); } } if (!scopes.isEmpty()) { logEdit.setScopes(scopes); } } @Override public void preLogRoll(Path oldPath, Path newPath) throws IOException { // Not interested } @Override public void postLogRoll(Path oldPath, Path newPath) throws 
IOException { getReplicationManager().logRolled(newPath); } @Override public void preLogArchive(Path oldPath, Path newPath) throws IOException { // Not interested } @Override public void postLogArchive(Path oldPath, Path newPath) throws IOException { // Not interested } /** * This method modifies the master's configuration in order to inject * replication-related features * @param conf */ public static void decorateMasterConfiguration(Configuration conf) { if (!isReplication(conf)) { return; } String plugins = conf.get(HBASE_MASTER_LOGCLEANER_PLUGINS); if (!plugins.contains(ReplicationLogCleaner.class.toString())) { conf.set(HBASE_MASTER_LOGCLEANER_PLUGINS, plugins + "," + ReplicationLogCleaner.class.getCanonicalName()); } } @Override public void logRollRequested() { // Not interested } @Override public void logCloseRequested() { // not interested } }
apache-2.0
tiancaiamao/tidb
util/codec/bytes.go
5743
// Copyright 2015 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.

package codec

import (
	"encoding/binary"
	"runtime"
	"unsafe"

	"github.com/pingcap/errors"
)

const (
	// encGroupSize is the fixed group width of the memcomparable encoding.
	encGroupSize = 8
	// encMarker is the marker byte for a full (unpadded) group.
	encMarker = byte(0xFF)
	// encPad is the byte used to pad a partial final group.
	encPad = byte(0x0)
)

var (
	// pads is a reusable all-zero slice used to pad the final group.
	pads = make([]byte, encGroupSize)
	// encPads holds a single pad byte (kept for callers elsewhere in the package).
	encPads = []byte{encPad}
)

// EncodeBytes guarantees the encoded value is in ascending order for comparison,
// encoding with the following rule:
//  [group1][marker1]...[groupN][markerN]
// group is 8 bytes slice which is padding with 0.
// marker is `0xFF - padding 0 count`
// For example:
//   [] -> [0, 0, 0, 0, 0, 0, 0, 0, 247]
//   [1, 2, 3] -> [1, 2, 3, 0, 0, 0, 0, 0, 250]
//   [1, 2, 3, 0] -> [1, 2, 3, 0, 0, 0, 0, 0, 251]
//   [1, 2, 3, 4, 5, 6, 7, 8] -> [1, 2, 3, 4, 5, 6, 7, 8, 255, 0, 0, 0, 0, 0, 0, 0, 0, 247]
// Refer: https://github.com/facebook/mysql-5.6/wiki/MyRocks-record-format#memcomparable-format
func EncodeBytes(b []byte, data []byte) []byte {
	// Allocate more space to avoid unnecessary slice growing.
	// Assume that the byte slice size is about `(len(data) / encGroupSize + 1) * (encGroupSize + 1)` bytes,
	// that is `(len(data) / 8 + 1) * 9` in our implement.
	dLen := len(data)
	reallocSize := (dLen/encGroupSize + 1) * (encGroupSize + 1)
	result := reallocBytes(b, reallocSize)
	// Note the `<=` bound: when dLen is a multiple of encGroupSize a fully
	// padded trailing group is still emitted (see the [1..8] example above),
	// which keeps the encoding prefix-free.
	for idx := 0; idx <= dLen; idx += encGroupSize {
		remain := dLen - idx
		padCount := 0
		if remain >= encGroupSize {
			result = append(result, data[idx:idx+encGroupSize]...)
		} else {
			padCount = encGroupSize - remain
			result = append(result, data[idx:]...)
			result = append(result, pads[:padCount]...)
		}
		marker := encMarker - byte(padCount)
		result = append(result, marker)
	}
	return result
}

// decodeBytes strips one EncodeBytes-encoded value off the front of b,
// appending the decoded payload into buf (reused when non-nil).
// When reverse is true, b is assumed to be bitwise-inverted (the
// EncodeBytesDesc form), so markers and pad bytes are inverted too.
// Returns the leftover bytes, the decoded value, and any error.
func decodeBytes(b []byte, buf []byte, reverse bool) ([]byte, []byte, error) {
	if buf == nil {
		buf = make([]byte, 0, len(b))
	}
	buf = buf[:0]
	for {
		// Every group is exactly encGroupSize payload bytes plus 1 marker byte.
		if len(b) < encGroupSize+1 {
			return nil, nil, errors.New("insufficient bytes to decode value")
		}
		groupBytes := b[:encGroupSize+1]
		group := groupBytes[:encGroupSize]
		marker := groupBytes[encGroupSize]
		var padCount byte
		if reverse {
			padCount = marker
		} else {
			padCount = encMarker - marker
		}
		if padCount > encGroupSize {
			return nil, nil, errors.Errorf("invalid marker byte, group bytes %q", groupBytes)
		}
		realGroupSize := encGroupSize - padCount
		buf = append(buf, group[:realGroupSize]...)
		b = b[encGroupSize+1:]
		if padCount != 0 {
			var padByte = encPad
			if reverse {
				padByte = encMarker
			}
			// Check validity of padding bytes.
			for _, v := range group[realGroupSize:] {
				if v != padByte {
					return nil, nil, errors.Errorf("invalid padding byte, group bytes %q", groupBytes)
				}
			}
			// A padded group is always the last group of the value.
			break
		}
	}
	if reverse {
		reverseBytes(buf)
	}
	return b, buf, nil
}

// DecodeBytes decodes bytes which is encoded by EncodeBytes before,
// returns the leftover bytes and decoded value if no error.
// `buf` is used to buffer data to avoid the cost of makeslice in decodeBytes when DecodeBytes is called by Decoder.DecodeOne.
func DecodeBytes(b []byte, buf []byte) ([]byte, []byte, error) {
	return decodeBytes(b, buf, false)
}

// EncodeBytesDesc first encodes bytes using EncodeBytes, then bitwise reverses
// encoded value to guarantee the encoded value is in descending order for comparison.
func EncodeBytesDesc(b []byte, data []byte) []byte {
	n := len(b)
	b = EncodeBytes(b, data)
	// Only invert the newly appended portion; b[:n] is the caller's prefix.
	reverseBytes(b[n:])
	return b
}

// DecodeBytesDesc decodes bytes which is encoded by EncodeBytesDesc before,
// returns the leftover bytes and decoded value if no error.
func DecodeBytesDesc(b []byte, buf []byte) ([]byte, []byte, error) {
	return decodeBytes(b, buf, true)
}

// EncodeCompactBytes joins bytes with its length into a byte slice. It is more
// efficient in both space and time compare to EncodeBytes. Note that the encoded
// result is not memcomparable.
func EncodeCompactBytes(b []byte, data []byte) []byte {
	b = reallocBytes(b, binary.MaxVarintLen64+len(data))
	b = EncodeVarint(b, int64(len(data)))
	return append(b, data...)
}

// DecodeCompactBytes decodes bytes which is encoded by EncodeCompactBytes before.
func DecodeCompactBytes(b []byte) ([]byte, []byte, error) {
	b, n, err := DecodeVarint(b)
	if err != nil {
		return nil, nil, errors.Trace(err)
	}
	if int64(len(b)) < n {
		return nil, nil, errors.Errorf("insufficient bytes to decode value, expected length: %v", n)
	}
	return b[n:], b[:n], nil
}

// See https://golang.org/src/crypto/cipher/xor.go
const wordSize = int(unsafe.Sizeof(uintptr(0)))
const supportsUnaligned = runtime.GOARCH == "386" || runtime.GOARCH == "amd64"

// fastReverseBytes bitwise-complements b one machine word at a time via an
// unsafe reinterpretation, then finishes the tail byte-by-byte. Only safe on
// architectures that tolerate unaligned word access (see supportsUnaligned).
func fastReverseBytes(b []byte) {
	n := len(b)
	w := n / wordSize
	if w > 0 {
		bw := *(*[]uintptr)(unsafe.Pointer(&b))
		for i := 0; i < w; i++ {
			bw[i] = ^bw[i]
		}
	}

	for i := w * wordSize; i < n; i++ {
		b[i] = ^b[i]
	}
}

// safeReverseBytes is the portable byte-at-a-time complement fallback.
func safeReverseBytes(b []byte) {
	for i := range b {
		b[i] = ^b[i]
	}
}

// reverseBytes bitwise-complements b in place, picking the fast path when
// the architecture allows unaligned word access.
func reverseBytes(b []byte) {
	if supportsUnaligned {
		fastReverseBytes(b)
		return
	}

	safeReverseBytes(b)
}

// reallocBytes is like realloc: it ensures b can hold n more bytes,
// copying into a larger backing array only when capacity is insufficient.
func reallocBytes(b []byte, n int) []byte {
	newSize := len(b) + n
	if cap(b) < newSize {
		bs := make([]byte, len(b), newSize)
		copy(bs, b)
		return bs
	}

	// slice b has capability to store n bytes
	return b
}
apache-2.0
kyr7/tango-examples-unity
UnityExamples/Assets/TangoPrefabs/Scripts/TangoPointCloud.cs
18770
// <copyright file="TangoPointCloud.cs" company="Google"> // // Copyright 2016 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // </copyright> //----------------------------------------------------------------------- using System; using System.Collections; using System.Collections.Generic; using Tango; using UnityEngine; /// <summary> /// Utility functions for working with and visualizing point cloud data from the /// Tango depth API. Used by the Tango Point Cloud prefab to enable depth point /// functionality. /// </summary> public class TangoPointCloud : MonoBehaviour, ITangoPointCloud { /// <summary> /// If set, the point cloud will be transformed to be in the Area /// Description frame. /// </summary> public bool m_useAreaDescriptionPose; /// <summary> /// If set, update the point cloud's mesh (very slow, useful for debugging). /// </summary> public bool m_updatePointsMesh; /// <summary> /// The points of the point cloud, in world space. /// /// Note that not every member of this array will be filled out. See /// m_pointsCount. /// </summary> [HideInInspector] public Vector3[] m_points; /// <summary> /// The number of points in m_points. /// </summary> [HideInInspector] public int m_pointsCount = 0; /// <summary> /// The average depth (relative to the depth camera). /// </summary> [HideInInspector] public float m_overallZ = 0.0f; /// <summary> /// Time between the last two depth events. 
/// </summary> [HideInInspector] public float m_depthDeltaTime = 0.0f; /// <summary> /// The position of the floor at y height when FindFloor has been called. /// /// The default value is 0, even if no floor has been found. When FindFloor has completed successfully, /// the result is assigned here. /// </summary> [HideInInspector] public float m_floorPlaneY = 0.0f; /// <summary> /// Check if a floor has been found. /// /// The value is <c>true</c> if the method FindFloor has successfully found a floor, which is assigned /// to m_floorPlaneY. The value is always <c>false</c> if FindFloor has not been called. /// </summary> [HideInInspector] public bool m_floorFound = false; /// <summary> /// The maximum points displayed. Just some constant value. /// </summary> private const int MAX_POINT_COUNT = 61440; /// <summary> /// The minimum number of points near a world position y to determine that it is a reasonable floor. /// </summary> private const int RECOGNITION_THRESHOLD = 1000; /// <summary> /// The minimum number of points near a world position y to determine that it is not simply noise points. /// </summary> private const int NOISE_THRESHOLD = 500; /// <summary> /// The interval in meters between buckets of points. For example, a high sensitivity of 0.01 will group /// points into buckets every 1cm. /// </summary> private const float SENSITIVITY = 0.02f; private TangoApplication m_tangoApplication; // Matrices for transforming pointcloud to world coordinates. // This equation will take account of the camera sensors extrinsic. // Full equation is: // Matrix4x4 unityWorldTDepthCamera = // m_unityWorldTStartService * startServiceTDevice * Matrix4x4.Inverse(m_imuTDevice) * m_imuTDepthCamera; private Matrix4x4 m_unityWorldTStartService; private Matrix4x4 m_imuTDevice; private Matrix4x4 m_imuTDepthCamera; // Matrix for transforming the Unity camera space to the color camera space. private Matrix4x4 m_colorCameraTUnityCamera; /// <summary> /// Color camera intrinsics. 
/// </summary> private TangoCameraIntrinsics m_colorCameraIntrinsics; /// <summary> /// If the camera data has already been set up. /// </summary> private bool m_cameraDataSetUp; /// <summary> /// The Tango timestamp from the last update of m_points. /// </summary> private double m_depthTimestamp; /// <summary> /// Mesh this script will modify. /// </summary> private Mesh m_mesh; private Renderer m_renderer; // Pose controller from which the offset is queried. private TangoDeltaPoseController m_tangoDeltaPoseController; /// <summary> /// Set to <c>true</c> when currently attempting to find a floor using depth points, <c>false</c> when not /// floor finding. /// </summary> private bool m_findFloorWithDepth = false; /// <summary> /// Used for floor finding, container for the number of points that fall into a y bucket within a sensitivity range. /// </summary> private Dictionary<float, int> m_numPointsAtY; /// <summary> /// Used for floor finding, the list of y value buckets that have sufficient points near that y position height /// to determine that it not simply noise. /// </summary> private List<float> m_nonNoiseBuckets; /// @cond /// <summary> /// Use this for initialization. /// </summary> public void Start() { m_tangoApplication = FindObjectOfType<TangoApplication>(); m_tangoApplication.Register(this); m_tangoDeltaPoseController = FindObjectOfType<TangoDeltaPoseController>(); m_unityWorldTStartService.SetColumn(0, new Vector4(1.0f, 0.0f, 0.0f, 0.0f)); m_unityWorldTStartService.SetColumn(1, new Vector4(0.0f, 0.0f, 1.0f, 0.0f)); m_unityWorldTStartService.SetColumn(2, new Vector4(0.0f, 1.0f, 0.0f, 0.0f)); m_unityWorldTStartService.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f)); // Constant matrix converting Unity world frame frame to device frame. 
m_colorCameraTUnityCamera.SetColumn(0, new Vector4(1.0f, 0.0f, 0.0f, 0.0f)); m_colorCameraTUnityCamera.SetColumn(1, new Vector4(0.0f, -1.0f, 0.0f, 0.0f)); m_colorCameraTUnityCamera.SetColumn(2, new Vector4(0.0f, 0.0f, 1.0f, 0.0f)); m_colorCameraTUnityCamera.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f)); // Assign triangles, note: this is just for visualizing point in the mesh data. m_points = new Vector3[MAX_POINT_COUNT]; m_mesh = GetComponent<MeshFilter>().mesh; m_mesh.Clear(); // Points used for finding floor plane. m_numPointsAtY = new Dictionary<float, int>(); m_nonNoiseBuckets = new List<float>(); m_renderer = GetComponent<Renderer>(); } /// <summary> /// Unity callback when the component gets destroyed. /// </summary> public void OnDestroy() { m_tangoApplication.Unregister(this); } /// <summary> /// Callback that gets called when depth is available from the Tango Service. /// </summary> /// <param name="pointCloud">Depth information from Tango.</param> public void OnTangoPointCloudAvailable(TangoPointCloudData pointCloud) { // Calculate the time since the last successful depth data // collection. if (m_depthTimestamp != 0.0) { m_depthDeltaTime = (float)((pointCloud.m_timestamp - m_depthTimestamp) * 1000.0); } // Fill in the data to draw the point cloud. m_pointsCount = pointCloud.m_numPoints; if (m_pointsCount > 0) { _SetUpCameraData(); TangoCoordinateFramePair pair; TangoPoseData poseData = new TangoPoseData(); // Query pose to transform point cloud to world coordinates, here we are using the timestamp // that we get from depth. 
if (m_useAreaDescriptionPose) { pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_AREA_DESCRIPTION; pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE; } else { pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_START_OF_SERVICE; pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE; } PoseProvider.GetPoseAtTime(poseData, pointCloud.m_timestamp, pair); if (poseData.status_code != TangoEnums.TangoPoseStatusType.TANGO_POSE_VALID) { return; } Matrix4x4 startServiceTDevice = poseData.ToMatrix4x4(); // The transformation matrix that represents the pointcloud's pose. // Explanation: // The pointcloud which is in Depth camera's frame, is put in unity world's // coordinate system(wrt unity world). // Then we are extracting the position and rotation from uwTuc matrix and applying it to // the PointCloud's transform. Matrix4x4 unityWorldTDepthCamera = m_unityWorldTStartService * startServiceTDevice * Matrix4x4.Inverse(m_imuTDevice) * m_imuTDepthCamera; transform.position = Vector3.zero; transform.rotation = Quaternion.identity; // Add offset to the pointcloud depending on the offset from TangoDeltaPoseController Matrix4x4 unityWorldOffsetTDepthCamera; if (m_tangoDeltaPoseController != null) { unityWorldOffsetTDepthCamera = m_tangoDeltaPoseController.UnityWorldOffset * unityWorldTDepthCamera; } else { unityWorldOffsetTDepthCamera = unityWorldTDepthCamera; } // Converting points array to world space. m_overallZ = 0; for (int i = 0; i < m_pointsCount; ++i) { Vector3 point = pointCloud[i]; m_points[i] = unityWorldOffsetTDepthCamera.MultiplyPoint3x4(point); m_overallZ += point.z; } m_overallZ = m_overallZ / m_pointsCount; m_depthTimestamp = pointCloud.m_timestamp; if (m_updatePointsMesh) { // Need to update indicies too! 
int[] indices = new int[m_pointsCount]; for (int i = 0; i < m_pointsCount; ++i) { indices[i] = i; } m_mesh.Clear(); m_mesh.vertices = m_points; m_mesh.SetIndices(indices, MeshTopology.Points, 0); } // The color should be pose relative, we need to store enough info to go back to pose values. m_renderer.material.SetMatrix("depthCameraTUnityWorld", unityWorldOffsetTDepthCamera.inverse); // Try to find the floor using this set of depth points if requested. if (m_findFloorWithDepth) { _FindFloorWithDepth(); } } else { m_overallZ = 0; } } /// @endcond /// <summary> /// Finds the closest point from a point cloud to a position on screen. /// /// This function is slow, as it looks at every single point in the point /// cloud. Avoid calling this more than once a frame. /// </summary> /// <returns>The index of the closest point, or -1 if not found.</returns> /// <param name="cam">The current camera.</param> /// <param name="pos">Position on screen (in pixels).</param> /// <param name="maxDist">The maximum pixel distance to allow.</param> public int FindClosestPoint(Camera cam, Vector2 pos, int maxDist) { int bestIndex = -1; float bestDistSqr = 0; for (int it = 0; it < m_pointsCount; ++it) { Vector3 screenPos3 = cam.WorldToScreenPoint(m_points[it]); Vector2 screenPos = new Vector2(screenPos3.x, screenPos3.y); float distSqr = Vector2.SqrMagnitude(screenPos - pos); if (distSqr > maxDist * maxDist) { continue; } if (bestIndex == -1 || distSqr < bestDistSqr) { bestIndex = it; bestDistSqr = distSqr; } } return bestIndex; } /// <summary> /// Given a screen coordinate, finds a plane that most closely fits the /// depth values in that area. /// /// This function is slow, as it looks at every single point in the point /// cloud. Avoid calling this more than once a frame. This also assumes the /// Unity camera intrinsics match the device's color camera. 
/// </summary> /// <returns><c>true</c>, if a plane was found; <c>false</c> otherwise.</returns> /// <param name="cam">The Unity camera.</param> /// <param name="pos">The point in screen space to perform detection on.</param> /// <param name="planeCenter">Filled in with the center of the plane in Unity world space.</param> /// <param name="plane">Filled in with a model of the plane in Unity world space.</param> public bool FindPlane(Camera cam, Vector2 pos, out Vector3 planeCenter, out Plane plane) { if (m_pointsCount == 0) { // No points to check, maybe not connected to the service yet planeCenter = Vector3.zero; plane = new Plane(); return false; } Matrix4x4 colorCameraTUnityWorld = m_colorCameraTUnityCamera * cam.transform.worldToLocalMatrix; Vector2 normalizedPos = cam.ScreenToViewportPoint(pos); // If the camera has a TangoARScreen attached, it is not displaying the entire color camera image. Correct // the normalized coordinates by taking the clipping into account. TangoARScreen arScreen = cam.gameObject.GetComponent<TangoARScreen>(); if (arScreen != null) { normalizedPos = arScreen.ViewportPointToCameraImagePoint(normalizedPos); } TangoCameraIntrinsics alignedIntrinsics = new TangoCameraIntrinsics(); VideoOverlayProvider.GetDeviceOientationAlignedIntrinsics(TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, alignedIntrinsics); int returnValue = TangoSupport.FitPlaneModelNearClick( m_points, m_pointsCount, m_depthTimestamp, alignedIntrinsics, ref colorCameraTUnityWorld, normalizedPos, out planeCenter, out plane); if (returnValue == Common.ErrorType.TANGO_SUCCESS) { return true; } else { return false; } } /// <summary> /// Start processing the point cloud depth points to find the position of the floor. /// </summary> public void FindFloor() { m_floorFound = false; m_findFloorWithDepth = true; m_floorPlaneY = 0.0f; } /// <summary> /// Sets up extrinsic matrixes and camera intrinsics for this hardware. 
/// </summary> private void _SetUpCameraData() { if (m_cameraDataSetUp) { return; } double timestamp = 0.0; TangoCoordinateFramePair pair; TangoPoseData poseData = new TangoPoseData(); // Query the extrinsics between IMU and device frame. pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU; pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_DEVICE; PoseProvider.GetPoseAtTime(poseData, timestamp, pair); m_imuTDevice = poseData.ToMatrix4x4(); // Query the extrinsics between IMU and depth camera frame. pair.baseFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_IMU; pair.targetFrame = TangoEnums.TangoCoordinateFrameType.TANGO_COORDINATE_FRAME_CAMERA_DEPTH; PoseProvider.GetPoseAtTime(poseData, timestamp, pair); m_imuTDepthCamera = poseData.ToMatrix4x4(); // Also get the camera intrinsics m_colorCameraIntrinsics = new TangoCameraIntrinsics(); VideoOverlayProvider.GetIntrinsics(TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR, m_colorCameraIntrinsics); m_cameraDataSetUp = true; } /// <summary> /// Use the last received set of depth points to find a reasonable floor. /// </summary> private void _FindFloorWithDepth() { m_numPointsAtY.Clear(); m_nonNoiseBuckets.Clear(); // Count each depth point into a bucket based on its world position y value. for (int i = 0; i < m_pointsCount; i++) { Vector3 point = m_points[i]; if (!point.Equals(Vector3.zero)) { // Group similar points into buckets based on sensitivity. float roundedY = Mathf.Round(point.y / SENSITIVITY) * SENSITIVITY; if (!m_numPointsAtY.ContainsKey(roundedY)) { m_numPointsAtY.Add(roundedY, 0); } m_numPointsAtY[roundedY]++; // Check if the y plane is a non-noise plane. if (m_numPointsAtY[roundedY] > NOISE_THRESHOLD && !m_nonNoiseBuckets.Contains(roundedY)) { m_nonNoiseBuckets.Add(roundedY); } } } // Find a plane at the y value. The y value must be below the camera y position. 
m_nonNoiseBuckets.Sort(); for (int i = 0; i < m_nonNoiseBuckets.Count; i++) { float yBucket = m_nonNoiseBuckets[i]; int numPoints = m_numPointsAtY[yBucket]; if (numPoints > RECOGNITION_THRESHOLD && yBucket < Camera.main.transform.position.y) { // Reject the plane if it is not the lowest. if (yBucket > m_nonNoiseBuckets[0]) { return; } m_floorFound = true; m_findFloorWithDepth = false; m_floorPlaneY = yBucket; m_numPointsAtY.Clear(); m_nonNoiseBuckets.Clear(); } } } }
apache-2.0
mk-5/gdx-fireapp
e2e/ios/libs/FirebaseDatabase.framework/Headers/FIRDatabaseQuery.h
16630
/* * Copyright 2017 Google * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #import "FIRDataEventType.h" #import "FIRDataSnapshot.h" #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN /** * A FIRDatabaseHandle is used to identify listeners of Firebase Database * events. These handles are returned by observeEventType: and can later be * passed to removeObserverWithHandle: to stop receiving updates. */ typedef NSUInteger FIRDatabaseHandle NS_SWIFT_NAME(DatabaseHandle); /** * A FIRDatabaseQuery instance represents a query over the data at a particular * location. * * You create one by calling one of the query methods (queryOrderedByChild:, * queryStartingAtValue:, etc.) on a FIRDatabaseReference. The query methods can * be chained to further specify the data you are interested in observing */ NS_SWIFT_NAME(DatabaseQuery) @interface FIRDatabaseQuery : NSObject #pragma mark - Attach observers to read data /** * observeEventType:withBlock: is used to listen for data changes at a * particular location. This is the primary way to read data from the Firebase * Database. Your block will be triggered for the initial data and again * whenever the data changes. * * Use removeObserverWithHandle: to stop receiving updates. * * @param eventType The type of event to listen for. * @param block The block that should be called with initial data and updates. * It is passed the data as a FIRDataSnapshot. 
* @return A handle used to unregister this block later using * removeObserverWithHandle: */ - (FIRDatabaseHandle)observeEventType:(FIRDataEventType)eventType withBlock: (void (^)(FIRDataSnapshot *snapshot))block; /** * observeEventType:andPreviousSiblingKeyWithBlock: is used to listen for data * changes at a particular location. This is the primary way to read data from * the Firebase Database. Your block will be triggered for the initial data and * again whenever the data changes. In addition, for FIRDataEventTypeChildAdded, * FIRDataEventTypeChildMoved, and FIRDataEventTypeChildChanged events, your * block will be passed the key of the previous node by priority order. * * Use removeObserverWithHandle: to stop receiving updates. * * @param eventType The type of event to listen for. * @param block The block that should be called with initial data and updates. * It is passed the data as a FIRDataSnapshot and the previous child's key. * @return A handle used to unregister this block later using * removeObserverWithHandle: */ - (FIRDatabaseHandle)observeEventType:(FIRDataEventType)eventType andPreviousSiblingKeyWithBlock: (void (^)(FIRDataSnapshot *snapshot, NSString *__nullable prevKey))block; /** * observeEventType:withBlock: is used to listen for data changes at a * particular location. This is the primary way to read data from the Firebase * Database. Your block will be triggered for the initial data and again * whenever the data changes. * * The cancelBlock will be called if you will no longer receive new events due * to no longer having permission. * * Use removeObserverWithHandle: to stop receiving updates. * * @param eventType The type of event to listen for. * @param block The block that should be called with initial data and updates. * It is passed the data as a FIRDataSnapshot. 
* @param cancelBlock The block that should be called if this client no longer * has permission to receive these events * @return A handle used to unregister this block later using * removeObserverWithHandle: */ - (FIRDatabaseHandle)observeEventType:(FIRDataEventType)eventType withBlock:(void (^)(FIRDataSnapshot *snapshot))block withCancelBlock: (nullable void (^)(NSError *error))cancelBlock; /** * observeEventType:andPreviousSiblingKeyWithBlock: is used to listen for data * changes at a particular location. This is the primary way to read data from * the Firebase Database. Your block will be triggered for the initial data and * again whenever the data changes. In addition, for FIRDataEventTypeChildAdded, * FIRDataEventTypeChildMoved, and FIRDataEventTypeChildChanged events, your * block will be passed the key of the previous node by priority order. * * The cancelBlock will be called if you will no longer receive new events due * to no longer having permission. * * Use removeObserverWithHandle: to stop receiving updates. * * @param eventType The type of event to listen for. * @param block The block that should be called with initial data and updates. * It is passed the data as a FIRDataSnapshot and the previous child's key. * @param cancelBlock The block that should be called if this client no longer * has permission to receive these events * @return A handle used to unregister this block later using * removeObserverWithHandle: */ - (FIRDatabaseHandle)observeEventType:(FIRDataEventType)eventType andPreviousSiblingKeyWithBlock: (void (^)(FIRDataSnapshot *snapshot, NSString *__nullable prevKey))block withCancelBlock: (nullable void (^)(NSError *error))cancelBlock; /** * This is equivalent to observeEventType:withBlock:, except the block is * immediately canceled after the initial data is returned. * * @param eventType The type of event to listen for. * @param block The block that should be called. It is passed the data as a * FIRDataSnapshot. 
*/ - (void)observeSingleEventOfType:(FIRDataEventType)eventType withBlock:(void (^)(FIRDataSnapshot *snapshot))block; /** * This is equivalent to observeEventType:withBlock:, except the block is * immediately canceled after the initial data is returned. In addition, for * FIRDataEventTypeChildAdded, FIRDataEventTypeChildMoved, and * FIRDataEventTypeChildChanged events, your block will be passed the key of the * previous node by priority order. * * @param eventType The type of event to listen for. * @param block The block that should be called. It is passed the data as a * FIRDataSnapshot and the previous child's key. */ - (void)observeSingleEventOfType:(FIRDataEventType)eventType andPreviousSiblingKeyWithBlock: (void (^)(FIRDataSnapshot *snapshot, NSString *__nullable prevKey))block; /** * This is equivalent to observeEventType:withBlock:, except the block is * immediately canceled after the initial data is returned. * * The cancelBlock will be called if you do not have permission to read data at * this location. * * @param eventType The type of event to listen for. * @param block The block that should be called. It is passed the data as a * FIRDataSnapshot. * @param cancelBlock The block that will be called if you don't have permission * to access this data */ - (void)observeSingleEventOfType:(FIRDataEventType)eventType withBlock:(void (^)(FIRDataSnapshot *snapshot))block withCancelBlock:(nullable void (^)(NSError *error))cancelBlock; /** * This is equivalent to observeEventType:withBlock:, except the block is * immediately canceled after the initial data is returned. In addition, for * FIRDataEventTypeChildAdded, FIRDataEventTypeChildMoved, and * FIRDataEventTypeChildChanged events, your block will be passed the key of the * previous node by priority order. * * The cancelBlock will be called if you do not have permission to read data at * this location. * * @param eventType The type of event to listen for. * @param block The block that should be called. 
It is passed the data as a * FIRDataSnapshot and the previous child's key. * @param cancelBlock The block that will be called if you don't have permission * to access this data */ - (void)observeSingleEventOfType:(FIRDataEventType)eventType andPreviousSiblingKeyWithBlock:(void (^)(FIRDataSnapshot *snapshot, NSString *__nullable prevKey))block withCancelBlock: (nullable void (^)(NSError *error))cancelBlock; #pragma mark - Detaching observers /** * Detach a block previously attached with observeEventType:withBlock:. * * @param handle The handle returned by the call to observeEventType:withBlock: * which we are trying to remove. */ - (void)removeObserverWithHandle:(FIRDatabaseHandle)handle; /** * Detach all blocks previously attached to this Firebase Database location with * observeEventType:withBlock: */ - (void)removeAllObservers; /** * By calling `keepSynced:YES` on a location, the data for that location will * automatically be downloaded and kept in sync, even when no listeners are * attached for that location. Additionally, while a location is kept synced, it * will not be evicted from the persistent disk cache. * * @param keepSynced Pass YES to keep this location synchronized, pass NO to * stop synchronization. */ - (void)keepSynced:(BOOL)keepSynced; #pragma mark - Querying and limiting /** * queryLimitedToFirst: is used to generate a reference to a limited view of the * data at this location. The FIRDatabaseQuery instance returned by * queryLimitedToFirst: will respond to at most the first limit child nodes. * * @param limit The upper bound, inclusive, for the number of child nodes to * receive events for * @return A FIRDatabaseQuery instance, limited to at most limit child nodes. */ - (FIRDatabaseQuery *)queryLimitedToFirst:(NSUInteger)limit; /** * queryLimitedToLast: is used to generate a reference to a limited view of the * data at this location. 
The FIRDatabaseQuery instance returned by * queryLimitedToLast: will respond to at most the last limit child nodes. * * @param limit The upper bound, inclusive, for the number of child nodes to * receive events for * @return A FIRDatabaseQuery instance, limited to at most limit child nodes. */ - (FIRDatabaseQuery *)queryLimitedToLast:(NSUInteger)limit; /** * queryOrderBy: is used to generate a reference to a view of the data that's * been sorted by the values of a particular child key. This method is intended * to be used in combination with queryStartingAtValue:, queryEndingAtValue:, or * queryEqualToValue:. * * @param key The child key to use in ordering data visible to the returned * FIRDatabaseQuery * @return A FIRDatabaseQuery instance, ordered by the values of the specified * child key. */ - (FIRDatabaseQuery *)queryOrderedByChild:(NSString *)key; /** * queryOrderedByKey: is used to generate a reference to a view of the data * that's been sorted by child key. This method is intended to be used in * combination with queryStartingAtValue:, queryEndingAtValue:, or * queryEqualToValue:. * * @return A FIRDatabaseQuery instance, ordered by child keys. */ - (FIRDatabaseQuery *)queryOrderedByKey; /** * queryOrderedByValue: is used to generate a reference to a view of the data * that's been sorted by child value. This method is intended to be used in * combination with queryStartingAtValue:, queryEndingAtValue:, or * queryEqualToValue:. * * @return A FIRDatabaseQuery instance, ordered by child value. */ - (FIRDatabaseQuery *)queryOrderedByValue; /** * queryOrderedByPriority: is used to generate a reference to a view of the data * that's been sorted by child priority. This method is intended to be used in * combination with queryStartingAtValue:, queryEndingAtValue:, or * queryEqualToValue:. * * @return A FIRDatabaseQuery instance, ordered by child priorities. 
*/ - (FIRDatabaseQuery *)queryOrderedByPriority; /** * queryStartingAtValue: is used to generate a reference to a limited view of * the data at this location. The FIRDatabaseQuery instance returned by * queryStartingAtValue: will respond to events at nodes with a value greater * than or equal to startValue. * * @param startValue The lower bound, inclusive, for the value of data visible * to the returned FIRDatabaseQuery * @return A FIRDatabaseQuery instance, limited to data with value greater than * or equal to startValue */ - (FIRDatabaseQuery *)queryStartingAtValue:(nullable id)startValue; /** * queryStartingAtValue:childKey: is used to generate a reference to a limited * view of the data at this location. The FIRDatabaseQuery instance returned by * queryStartingAtValue:childKey will respond to events at nodes with a value * greater than startValue, or equal to startValue and with a key greater than * or equal to childKey. This is most useful when implementing pagination in a * case where multiple nodes can match the startValue. * * @param startValue The lower bound, inclusive, for the value of data visible * to the returned FIRDatabaseQuery * @param childKey The lower bound, inclusive, for the key of nodes with value * equal to startValue * @return A FIRDatabaseQuery instance, limited to data with value greater than * or equal to startValue */ - (FIRDatabaseQuery *)queryStartingAtValue:(nullable id)startValue childKey:(nullable NSString *)childKey; /** * queryEndingAtValue: is used to generate a reference to a limited view of the * data at this location. The FIRDatabaseQuery instance returned by * queryEndingAtValue: will respond to events at nodes with a value less than or * equal to endValue. 
* * @param endValue The upper bound, inclusive, for the value of data visible to * the returned FIRDatabaseQuery * @return A FIRDatabaseQuery instance, limited to data with value less than or * equal to endValue */ - (FIRDatabaseQuery *)queryEndingAtValue:(nullable id)endValue; /** * queryEndingAtValue:childKey: is used to generate a reference to a limited * view of the data at this location. The FIRDatabaseQuery instance returned by * queryEndingAtValue:childKey will respond to events at nodes with a value less * than endValue, or equal to endValue and with a key less than or equal to * childKey. This is most useful when implementing pagination in a case where * multiple nodes can match the endValue. * * @param endValue The upper bound, inclusive, for the value of data visible to * the returned FIRDatabaseQuery * @param childKey The upper bound, inclusive, for the key of nodes with value * equal to endValue * @return A FIRDatabaseQuery instance, limited to data with value less than or * equal to endValue */ - (FIRDatabaseQuery *)queryEndingAtValue:(nullable id)endValue childKey:(nullable NSString *)childKey; /** * queryEqualToValue: is used to generate a reference to a limited view of the * data at this location. The FIRDatabaseQuery instance returned by * queryEqualToValue: will respond to events at nodes with a value equal to the * supplied argument. * * @param value The value that the data returned by this FIRDatabaseQuery will * have * @return A FIRDatabaseQuery instance, limited to data with the supplied value. */ - (FIRDatabaseQuery *)queryEqualToValue:(nullable id)value; /** * queryEqualToValue:childKey: is used to generate a reference to a limited view * of the data at this location. The FIRDatabaseQuery instance returned by * queryEqualToValue:childKey will respond to events at nodes with a value equal * to the supplied argument and with their key equal to childKey. There will be * at most one node that matches because child keys are unique. 
* * @param value The value that the data returned by this FIRDatabaseQuery will * have * @param childKey The name of nodes with the right value * @return A FIRDatabaseQuery instance, limited to data with the supplied value * and the key. */ - (FIRDatabaseQuery *)queryEqualToValue:(nullable id)value childKey:(nullable NSString *)childKey; #pragma mark - Properties /** * Gets a FIRDatabaseReference for the location of this query. * * @return A FIRDatabaseReference for the location of this query. */ @property(nonatomic, readonly, strong) FIRDatabaseReference *ref; @end NS_ASSUME_NONNULL_END
apache-2.0
biospi/seamass-windeps
src/boost_1_57_0/doc/html/boost/xpressive/regex_constants/match_flag_type.html
12608
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>Type match_flag_type</title> <link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset"> <link rel="up" href="../../../xpressive/reference.html#header.boost.xpressive.regex_constants_hpp" title="Header &lt;boost/xpressive/regex_constants.hpp&gt;"> <link rel="prev" href="syntax_option_type.html" title="Type syntax_option_type"> <link rel="next" href="error_type.html" title="Type error_type"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td> <td align="center"><a href="../../../../../index.html">Home</a></td> <td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="syntax_option_type.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../../xpressive/reference.html#header.boost.xpressive.regex_constants_hpp"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="error_type.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="refentry"> <a name="boost.xpressive.regex_constants.match_flag_type"></a><div class="titlepage"></div> <div class="refnamediv"> 
<h2><span class="refentrytitle">Type match_flag_type</span></h2> <p>boost::xpressive::regex_constants::match_flag_type</p> </div> <h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2> <div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: &lt;<a class="link" href="../../../xpressive/reference.html#header.boost.xpressive.regex_constants_hpp" title="Header &lt;boost/xpressive/regex_constants.hpp&gt;">boost/xpressive/regex_constants.hpp</a>&gt; </span> <span class="keyword">enum</span> <span class="identifier">match_flag_type</span> <span class="special">{</span> <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_default">match_default</a> = = 0, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_not_bol">match_not_bol</a> = = 1 &lt;&lt; 1, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_not_eol">match_not_eol</a> = = 1 &lt;&lt; 2, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_not_bow">match_not_bow</a> = = 1 &lt;&lt; 3, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_not_eow">match_not_eow</a> = = 1 &lt;&lt; 4, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_any">match_any</a> = = 1 &lt;&lt; 7, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_not_null">match_not_null</a> = = 1 &lt;&lt; 8, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_continuous">match_continuous</a> = = 1 &lt;&lt; 10, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_partial">match_partial</a> = = 1 &lt;&lt; 11, <a 
class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.match_prev_avail">match_prev_avail</a> = = 1 &lt;&lt; 12, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.format_default">format_default</a> = = 0, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.format_sed">format_sed</a> = = 1 &lt;&lt; 13, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.format_perl">format_perl</a> = = 1 &lt;&lt; 14, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.format_no_copy">format_no_copy</a> = = 1 &lt;&lt; 15, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.format_first_only">format_first_only</a> = = 1 &lt;&lt; 16, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.format_literal">format_literal</a> = = 1 &lt;&lt; 17, <a class="link" href="match_flag_type.html#boost.xpressive.regex_constants.match_flag_type.format_all">format_all</a> = = 1 &lt;&lt; 18 <span class="special">}</span><span class="special">;</span></pre></div> <div class="refsect1"> <a name="idp536420912"></a><h2>Description</h2> <p>Flags used to customize the behavior of the regex algorithms </p> <div class="variablelist"><dl class="variablelist compact"> <dt><span class="term"><code class="computeroutput">match_default</code><a name="boost.xpressive.regex_constants.match_flag_type.match_default"></a></span></dt> <dd><p>Specifies that matching of regular expressions proceeds without any modification of the normal rules used in ECMA-262, ECMAScript Language Specification, Chapter 15 part 10, RegExp (Regular Expression) Objects (FWD.1) </p></dd> <dt><span class="term"><code class="computeroutput">match_not_bol</code><a name="boost.xpressive.regex_constants.match_flag_type.match_not_bol"></a></span></dt> <dd><p>Specifies that the expression 
"^" should not be matched against the sub-sequence [first,first). </p></dd> <dt><span class="term"><code class="computeroutput">match_not_eol</code><a name="boost.xpressive.regex_constants.match_flag_type.match_not_eol"></a></span></dt> <dd><p>Specifies that the expression "\$" should not be matched against the sub-sequence [last,last). </p></dd> <dt><span class="term"><code class="computeroutput">match_not_bow</code><a name="boost.xpressive.regex_constants.match_flag_type.match_not_bow"></a></span></dt> <dd><p>Specifies that the expression "\\b" should not be matched against the sub-sequence [first,first). </p></dd> <dt><span class="term"><code class="computeroutput">match_not_eow</code><a name="boost.xpressive.regex_constants.match_flag_type.match_not_eow"></a></span></dt> <dd><p>Specifies that the expression "\\b" should not be matched against the sub-sequence [last,last). </p></dd> <dt><span class="term"><code class="computeroutput">match_any</code><a name="boost.xpressive.regex_constants.match_flag_type.match_any"></a></span></dt> <dd><p>Specifies that if more than one match is possible then any match is an acceptable result. </p></dd> <dt><span class="term"><code class="computeroutput">match_not_null</code><a name="boost.xpressive.regex_constants.match_flag_type.match_not_null"></a></span></dt> <dd><p>Specifies that the expression can not be matched against an empty sequence. </p></dd> <dt><span class="term"><code class="computeroutput">match_continuous</code><a name="boost.xpressive.regex_constants.match_flag_type.match_continuous"></a></span></dt> <dd><p>Specifies that the expression must match a sub-sequence that begins at first. 
</p></dd> <dt><span class="term"><code class="computeroutput">match_partial</code><a name="boost.xpressive.regex_constants.match_flag_type.match_partial"></a></span></dt> <dd><p>Specifies that if no match can be found, then it is acceptable to return a match [from, last) where from != last, if there exists some sequence of characters [from,to) of which [from,last) is a prefix, and which would result in a full match. </p></dd> <dt><span class="term"><code class="computeroutput">match_prev_avail</code><a name="boost.xpressive.regex_constants.match_flag_type.match_prev_avail"></a></span></dt> <dd><p>Specifies that <span style="color: red">&lt;ndash&gt;&lt;/ndash&gt;</span>first is a valid iterator position, when this flag is set then the flags match_not_bol and match_not_bow are ignored by the regular expression algorithms (RE.7) and iterators (RE.8). </p></dd> <dt><span class="term"><code class="computeroutput">format_default</code><a name="boost.xpressive.regex_constants.match_flag_type.format_default"></a></span></dt> <dd><p>Specifies that when a regular expression match is to be replaced by a new string, that the new string is constructed using the rules used by the ECMAScript replace function in ECMA-262, ECMAScript Language Specification, Chapter 15 part 5.4.11 String.prototype.replace. (FWD.1). In addition during search and replace operations then all non-overlapping occurrences of the regular expression are located and replaced, and sections of the input that did not match the expression, are copied unchanged to the output string. </p></dd> <dt><span class="term"><code class="computeroutput">format_sed</code><a name="boost.xpressive.regex_constants.match_flag_type.format_sed"></a></span></dt> <dd><p>Specifies that when a regular expression match is to be replaced by a new string, that the new string is constructed using the rules used by the Unix sed utility in IEEE Std 1003.1-2001, Portable Operating SystemInterface (POSIX), Shells and Utilities. 
</p></dd> <dt><span class="term"><code class="computeroutput">format_perl</code><a name="boost.xpressive.regex_constants.match_flag_type.format_perl"></a></span></dt> <dd><p>Specifies that when a regular expression match is to be replaced by a new string, that the new string is constructed using an implementation defined superset of the rules used by the ECMAScript replace function in ECMA-262, ECMAScript Language Specification, Chapter 15 part 5.4.11 String.prototype.replace (FWD.1). </p></dd> <dt><span class="term"><code class="computeroutput">format_no_copy</code><a name="boost.xpressive.regex_constants.match_flag_type.format_no_copy"></a></span></dt> <dd><p>When specified during a search and replace operation, then sections of the character container sequence being searched that do match the regular expression, are not copied to the output string. </p></dd> <dt><span class="term"><code class="computeroutput">format_first_only</code><a name="boost.xpressive.regex_constants.match_flag_type.format_first_only"></a></span></dt> <dd><p>When specified during a search and replace operation, then only the first occurrence of the regular expression is replaced. </p></dd> <dt><span class="term"><code class="computeroutput">format_literal</code><a name="boost.xpressive.regex_constants.match_flag_type.format_literal"></a></span></dt> <dd><p>Treat the format string as a literal. </p></dd> <dt><span class="term"><code class="computeroutput">format_all</code><a name="boost.xpressive.regex_constants.match_flag_type.format_all"></a></span></dt> <dd><p>Specifies that all syntax extensions are enabled, including conditional (?ddexpression1:expression2) replacements. </p></dd> </dl></div> </div> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2007 Eric Niebler<p> Distributed under the Boost Software License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="syntax_option_type.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../../xpressive/reference.html#header.boost.xpressive.regex_constants_hpp"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="error_type.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
apache-2.0
claimsmall/google-api-ads-ruby
adwords_api/examples/v201506/targeting/get_targetable_languages_and_carriers.rb
3071
#!/usr/bin/env ruby
# Encoding: utf-8
#
# Author:: [email protected] (Danial Klimkin)
#
# Copyright:: Copyright 2011, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
#           you may not use this file except in compliance with the License.
#           You may obtain a copy of the License at
#
#           http://www.apache.org/licenses/LICENSE-2.0
#
#           Unless required by applicable law or agreed to in writing, software
#           distributed under the License is distributed on an "AS IS" BASIS,
#           WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
#           implied.
#           See the License for the specific language governing permissions and
#           limitations under the License.
#
# This example illustrates how to retrieve all languages and carriers available
# for targeting.
#
# Tags: ConstantDataService.getLanguageCriterion
# Tags: ConstantDataService.getCarrierCriterion

require 'adwords_api'

# Fetches every targetable language and mobile carrier from
# ConstantDataService and prints one line per criterion.
def get_targetable_languages_and_carriers()
  # An AdwordsApi::Api instance constructed with no arguments reads its
  # configuration from ENV['HOME']/adwords_api.yml.
  api = AdwordsApi::Api.new

  # To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
  # the configuration file or provide your own logger:
  # api.logger = Logger.new('adwords_xml.log')

  constant_data_service = api.service(:ConstantDataService, API_VERSION)

  # Languages available for targeting.
  languages = constant_data_service.get_language_criterion()
  unless languages
    puts 'No languages were found.'
  else
    languages.each do |lang|
      puts "Language name is '%s', ID is %d and code is '%s'." %
          [lang[:name], lang[:id], lang[:code]]
    end
  end

  # Mobile carriers available for targeting.
  carriers = constant_data_service.get_carrier_criterion()
  unless carriers
    puts 'No carriers were retrieved.'
  else
    carriers.each do |carrier|
      puts "Carrier name is '%s', ID is %d and country code is '%s'." %
          [carrier[:name], carrier[:id], carrier[:country_code]]
    end
  end
end

if __FILE__ == $0
  API_VERSION = :v201506

  begin
    get_targetable_languages_and_carriers()

  # Authorization error.
  rescue AdsCommon::Errors::OAuth2VerificationRequired => e
    puts "Authorization credentials are not valid. Edit adwords_api.yml for " +
        "OAuth2 client ID and secret and run misc/setup_oauth2.rb example " +
        "to retrieve and store OAuth2 tokens." +
        "\nSee this wiki page for more details:\n\n  " +
        'http://code.google.com/p/google-api-ads-ruby/wiki/OAuth2'

  # HTTP errors.
  rescue AdsCommon::Errors::HttpError => e
    puts format("HTTP Error: %s", e)

  # API errors.
  rescue AdwordsApi::Errors::ApiException => e
    puts format("Message: %s", e.message)
    puts 'Errors:'
    e.errors.each_with_index do |error, index|
      puts format("\tError [%d]:", index + 1)
      error.each do |field, value|
        puts format("\t\t%s: %s", field, value)
      end
    end
  end
end
apache-2.0
rastasheep/trash-can
cf/commands/application/restage.go
2353
package application import ( "github.com/cloudfoundry/cli/cf/api/applications" "github.com/cloudfoundry/cli/cf/command_metadata" "github.com/cloudfoundry/cli/cf/configuration/core_config" "github.com/cloudfoundry/cli/cf/errors" . "github.com/cloudfoundry/cli/cf/i18n" "github.com/cloudfoundry/cli/cf/models" "github.com/cloudfoundry/cli/cf/requirements" "github.com/cloudfoundry/cli/cf/terminal" "github.com/codegangsta/cli" ) type Restage struct { ui terminal.UI config core_config.Reader appRepo applications.ApplicationRepository appStagingWatcher ApplicationStagingWatcher } func NewRestage(ui terminal.UI, config core_config.Reader, appRepo applications.ApplicationRepository, stagingWatcher ApplicationStagingWatcher) *Restage { cmd := new(Restage) cmd.ui = ui cmd.config = config cmd.appRepo = appRepo cmd.appStagingWatcher = stagingWatcher return cmd } func (cmd *Restage) Metadata() command_metadata.CommandMetadata { return command_metadata.CommandMetadata{ Name: "restage", ShortName: "rg", Description: T("Restage an app"), Usage: T("CF_NAME restage APP_NAME"), } } func (cmd *Restage) GetRequirements(requirementsFactory requirements.Factory, c *cli.Context) (reqs []requirements.Requirement, err error) { if len(c.Args()) != 1 { cmd.ui.FailWithUsage(c) } reqs = []requirements.Requirement{ requirementsFactory.NewLoginRequirement(), requirementsFactory.NewTargetedSpaceRequirement(), } return } func (cmd *Restage) Run(c *cli.Context) { app, err := cmd.appRepo.Read(c.Args()[0]) if notFound, ok := err.(*errors.ModelNotFoundError); ok { cmd.ui.Failed(notFound.Error()) } cmd.ui.Say(T("Restaging app {{.AppName}} in org {{.OrgName}} / space {{.SpaceName}} as {{.CurrentUser}}...", map[string]interface{}{ "AppName": terminal.EntityNameColor(app.Name), "OrgName": terminal.EntityNameColor(cmd.config.OrganizationFields().Name), "SpaceName": terminal.EntityNameColor(cmd.config.SpaceFields().Name), "CurrentUser": terminal.EntityNameColor(cmd.config.Username()), })) 
cmd.appStagingWatcher.ApplicationWatchStaging(app, cmd.config.OrganizationFields().Name, cmd.config.SpaceFields().Name, func(app models.Application) (models.Application, error) { return app, cmd.appRepo.CreateRestageRequest(app.Guid) }) }
apache-2.0
XidongHuang/aws-sdk-for-java
src/main/java/com/amazonaws/services/elasticmapreduce/model/ActionOnFailure.java
1818
/* * Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.elasticmapreduce.model; /** * Action On Failure */ public enum ActionOnFailure { TERMINATE_JOB_FLOW("TERMINATE_JOB_FLOW"), CANCEL_AND_WAIT("CANCEL_AND_WAIT"), CONTINUE("CONTINUE"); private String value; private ActionOnFailure(String value) { this.value = value; } @Override public String toString() { return this.value; } /** * Use this in place of valueOf. * * @param value * real value * @return ActionOnFailure corresponding to the value */ public static ActionOnFailure fromValue(String value) { if (value == null || "".equals(value)) { throw new IllegalArgumentException("Value cannot be null or empty!"); } else if ("TERMINATE_JOB_FLOW".equals(value)) { return ActionOnFailure.TERMINATE_JOB_FLOW; } else if ("CANCEL_AND_WAIT".equals(value)) { return ActionOnFailure.CANCEL_AND_WAIT; } else if ("CONTINUE".equals(value)) { return ActionOnFailure.CONTINUE; } else { throw new IllegalArgumentException("Cannot create enum from " + value + " value!"); } } }
apache-2.0
beaufortfrancois/WebFundamentals
src/content/en/resources/jekyll/_code/example.js
261
'use strict';

/*
// [START classdefinition]
*/
export default class exampleClass {
/*
// [END classdefinition]
*/

  // NOTE: the original constructor called `super()`, which is a SyntaxError
  // in a class that has no `extends` clause — only derived-class
  // constructors may call super. Removed so the example actually runs.
  constructor () {
    console.log('Example Constructor');
  }

  // Demonstration instance method; logs a fixed message.
  exampleFunction () {
    console.log('Example Function');
  }
}
apache-2.0
harish961/Hygieia-WFN
collectors/artifact/artifactory/docker/Dockerfile
388
# Hygieia Artifactory artifact collector image: runs the collector jar against
# a properties file generated from environment variables at container start.

# Base image: OpenJDK 8 (JDK) from Docker Hub.
FROM docker.io/java:openjdk-8-jdk

MAINTAINER [email protected]

# Application directory holding the collector jar and its helper script.
RUN \
mkdir /hygieia

COPY *.jar /hygieia/

COPY artifactory-properties-builder.sh /hygieia/

WORKDIR /hygieia

# Collector log output lives outside the container's writable layer.
VOLUME ["/hygieia/logs"]

# Build the properties file from the environment, then launch the collector
# pointing Spring at that generated configuration.
CMD ./artifactory-properties-builder.sh && \
  java -jar artifactory-artifact-collector*.jar --spring.config.location=/hygieia/hygieia-artifactory-artifact-collector.properties
apache-2.0
JohnSZhang/contour
COPYRIGHT.md
580
Copyright 2014 Forio Corporation. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this software except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
apache-2.0
rrenomeron/cas
core/cas-server-core-logout/src/test/java/org/apereo/cas/logout/LogoutHttpMessageTests.java
562
package org.apereo.cas.logout;

import lombok.val;
import org.junit.jupiter.api.Test;

import java.net.URL;

import static org.junit.jupiter.api.Assertions.*;

/**
 * This is {@link LogoutHttpMessageTests}.
 *
 * @author Misagh Moayyed
 * @since 6.0.0
 */
public class LogoutHttpMessageTests {

    @Test
    public void verifyOperation() throws Exception {
        // A logout message built for a URL must serialize with the standard
        // logout-request parameter name as its prefix.
        val httpMessage = new LogoutHttpMessage(new URL("https://github.com"), "LogoutMessage", false);
        val body = httpMessage.getMessage();
        assertTrue(body.startsWith(LogoutHttpMessage.LOGOUT_REQUEST_PARAMETER));
    }
}
apache-2.0
efortuna/AndroidSDKClone
ndk_experimental/prebuilt/linux-x86_64/lib/perl5/5.16.2/unicore/lib/Jg/Seen.pl
481
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!! # This file is machine-generated by lib/unicore/mktables from the Unicode # database, Version 6.1.0. Any changes made here will be lost! # !!!!!!! INTERNAL PERL USE ONLY !!!!!!! # This file is for internal use by core Perl only. The format and even the # name or existence of this file are subject to change without notice. Don't # use it directly. return <<'END'; 0633 0634 069A 069C 06FA 075C 076D 0770 077D 077E END
apache-2.0
paninetworks/neutron
neutron/tests/unit/db/quota/test_api.py
10763
# Copyright (c) 2015 OpenStack Foundation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from neutron import context
from neutron.db.quota import api as quota_api
from neutron.tests.unit import testlib_api


class TestQuotaDbApi(testlib_api.SqlTestCaseLight):
    """DB-level tests for the quota usage API in a plain tenant context.

    Each test talks to a real (lightweight) SQL backend via
    testlib_api.SqlTestCaseLight; the tenant/resource names are football
    players, purely for flavor.
    """

    def _set_context(self):
        # Non-admin context: queries are scoped to this tenant only.
        self.tenant_id = 'Higuain'
        self.context = context.Context('Gonzalo', self.tenant_id,
                                       is_admin=False,
                                       is_advsvc=False)

    def _create_quota_usage(self, resource, used, reserved, tenant_id=None):
        """Seed a quota usage row; defaults to the test's own tenant."""
        tenant_id = tenant_id or self.tenant_id
        return quota_api.set_quota_usage(
            self.context, resource, tenant_id,
            in_use=used, reserved=reserved)

    def _verify_quota_usage(self, usage_info,
                            expected_resource=None,
                            expected_used=None,
                            expected_reserved=None,
                            expected_dirty=None):
        """Check a usage record; only the non-None expectations are asserted."""
        self.assertEqual(self.tenant_id, usage_info.tenant_id)
        if expected_resource:
            self.assertEqual(expected_resource, usage_info.resource)
        if expected_dirty is not None:
            self.assertEqual(expected_dirty, usage_info.dirty)
        if expected_used is not None:
            self.assertEqual(expected_used, usage_info.used)
        if expected_reserved is not None:
            self.assertEqual(expected_reserved, usage_info.reserved)
        if expected_used is not None and expected_reserved is not None:
            # total is expected to be the sum of in-use and reserved amounts
            self.assertEqual(expected_used + expected_reserved,
                             usage_info.total)

    def setUp(self):
        super(TestQuotaDbApi, self).setUp()
        self._set_context()

    def test_create_quota_usage(self):
        usage_info = self._create_quota_usage('goals', 26, 10)
        self._verify_quota_usage(usage_info,
                                 expected_resource='goals',
                                 expected_used=26,
                                 expected_reserved=10)

    def test_update_quota_usage(self):
        self._create_quota_usage('goals', 26, 10)
        # Higuain scores a double
        usage_info_1 = quota_api.set_quota_usage(
            self.context, 'goals', self.tenant_id,
            in_use=28)
        self._verify_quota_usage(usage_info_1,
                                 expected_used=28,
                                 expected_reserved=10)
        usage_info_2 = quota_api.set_quota_usage(
            self.context, 'goals', self.tenant_id,
            reserved=8)
        self._verify_quota_usage(usage_info_2,
                                 expected_used=28,
                                 expected_reserved=8)

    def test_update_quota_usage_with_deltas(self):
        self._create_quota_usage('goals', 26, 10)
        # Higuain scores a double
        usage_info_1 = quota_api.set_quota_usage(
            self.context, 'goals', self.tenant_id,
            in_use=2, delta=True)
        self._verify_quota_usage(usage_info_1,
                                 expected_used=28,
                                 expected_reserved=10)
        usage_info_2 = quota_api.set_quota_usage(
            self.context, 'goals', self.tenant_id,
            reserved=-2, delta=True)
        self._verify_quota_usage(usage_info_2,
                                 expected_used=28,
                                 expected_reserved=8)

    def test_set_quota_usage_dirty(self):
        self._create_quota_usage('goals', 26, 10)
        # Higuain needs a shower after the match
        self.assertEqual(1, quota_api.set_quota_usage_dirty(
            self.context, 'goals', self.tenant_id))
        usage_info = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'goals', self.tenant_id)
        self._verify_quota_usage(usage_info,
                                 expected_dirty=True)
        # Higuain is clean now
        self.assertEqual(1, quota_api.set_quota_usage_dirty(
            self.context, 'goals', self.tenant_id, dirty=False))
        usage_info = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'goals', self.tenant_id)
        self._verify_quota_usage(usage_info,
                                 expected_dirty=False)

    def test_set_dirty_non_existing_quota_usage(self):
        # Marking a resource with no usage row dirty affects 0 rows.
        self.assertEqual(0, quota_api.set_quota_usage_dirty(
            self.context, 'meh', self.tenant_id))

    def test_set_resources_quota_usage_dirty(self):
        self._create_quota_usage('goals', 26, 10)
        self._create_quota_usage('assists', 11, 5)
        self._create_quota_usage('bookings', 3, 1)
        # Only the two listed resources should be flagged dirty.
        self.assertEqual(2, quota_api.set_resources_quota_usage_dirty(
            self.context, ['goals', 'bookings'], self.tenant_id))
        usage_info_goals = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'goals', self.tenant_id)
        usage_info_assists = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'assists', self.tenant_id)
        usage_info_bookings = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'bookings', self.tenant_id)
        self._verify_quota_usage(usage_info_goals,
                                 expected_dirty=True)
        self._verify_quota_usage(usage_info_assists,
                                 expected_dirty=False)
        self._verify_quota_usage(usage_info_bookings,
                                 expected_dirty=True)

    def test_set_resources_quota_usage_dirty_with_empty_list(self):
        self._create_quota_usage('goals', 26, 10)
        self._create_quota_usage('assists', 11, 5)
        self._create_quota_usage('bookings', 3, 1)
        # Expect all the resources for the tenant to be set dirty
        self.assertEqual(3, quota_api.set_resources_quota_usage_dirty(
            self.context, [], self.tenant_id))
        usage_info_goals = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'goals', self.tenant_id)
        usage_info_assists = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'assists', self.tenant_id)
        usage_info_bookings = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'bookings', self.tenant_id)
        self._verify_quota_usage(usage_info_goals,
                                 expected_dirty=True)
        self._verify_quota_usage(usage_info_assists,
                                 expected_dirty=True)
        self._verify_quota_usage(usage_info_bookings,
                                 expected_dirty=True)
        # Higuain is clean now
        self.assertEqual(1, quota_api.set_quota_usage_dirty(
            self.context, 'goals', self.tenant_id, dirty=False))
        usage_info = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'goals', self.tenant_id)
        self._verify_quota_usage(usage_info,
                                 expected_dirty=False)

    def _test_set_all_quota_usage_dirty(self, expected):
        # Seeds the same resource for two tenants, then checks how many rows
        # the context is allowed to mark dirty (tenant-scoped vs admin).
        self._create_quota_usage('goals', 26, 10)
        self._create_quota_usage('goals', 12, 6, tenant_id='Callejon')
        self.assertEqual(expected, quota_api.set_all_quota_usage_dirty(
            self.context, 'goals'))

    def test_set_all_quota_usage_dirty(self):
        # All goal scorers need a shower after the match, but since this is not
        # admin context we can clean only one
        self._test_set_all_quota_usage_dirty(expected=1)

    def test_get_quota_usage_by_tenant(self):
        self._create_quota_usage('goals', 26, 10)
        self._create_quota_usage('assists', 11, 5)
        # Create a resource for a different tenant
        self._create_quota_usage('mehs', 99, 99, tenant_id='buffon')
        usage_infos = quota_api.get_quota_usage_by_tenant_id(
            self.context, self.tenant_id)
        self.assertEqual(2, len(usage_infos))
        resources = [info.resource for info in usage_infos]
        self.assertIn('goals', resources)
        self.assertIn('assists', resources)

    def test_get_quota_usage_by_resource(self):
        self._create_quota_usage('goals', 26, 10)
        self._create_quota_usage('assists', 11, 5)
        self._create_quota_usage('goals', 12, 6, tenant_id='Callejon')
        usage_infos = quota_api.get_quota_usage_by_resource(
            self.context, 'goals')
        # Only 1 result expected in tenant context
        self.assertEqual(1, len(usage_infos))
        self._verify_quota_usage(usage_infos[0],
                                 expected_resource='goals',
                                 expected_used=26,
                                 expected_reserved=10)

    def test_get_quota_usage_by_tenant_and_resource(self):
        self._create_quota_usage('goals', 26, 10)
        usage_info = quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'goals', self.tenant_id)
        self._verify_quota_usage(usage_info,
                                 expected_resource='goals',
                                 expected_used=26,
                                 expected_reserved=10)

    def test_get_non_existing_quota_usage_returns_none(self):
        self.assertIsNone(quota_api.get_quota_usage_by_resource_and_tenant(
            self.context, 'goals', self.tenant_id))


class TestQuotaDbApiAdminContext(TestQuotaDbApi):
    """Re-runs the quota usage tests with an admin context.

    Only the tests whose expected visibility/row counts differ under admin
    privileges are overridden; everything else is inherited unchanged.
    """

    def _set_context(self):
        # Admin context: queries see rows for all tenants.
        self.tenant_id = 'Higuain'
        self.context = context.Context('Gonzalo', self.tenant_id,
                                       is_admin=True,
                                       is_advsvc=True,
                                       load_admin_roles=False)

    def test_get_quota_usage_by_resource(self):
        self._create_quota_usage('goals', 26, 10)
        self._create_quota_usage('assists', 11, 5)
        self._create_quota_usage('goals', 12, 6, tenant_id='Callejon')
        usage_infos = quota_api.get_quota_usage_by_resource(
            self.context, 'goals')
        # 2 results expected in admin context
        self.assertEqual(2, len(usage_infos))
        for usage_info in usage_infos:
            self.assertEqual('goals', usage_info.resource)

    def test_set_all_quota_usage_dirty(self):
        # All goal scorers need a shower after the match, and with admin
        # context we should be able to clean all of them
        self._test_set_all_quota_usage_dirty(expected=2)
apache-2.0
ravilr/kubernetes
pkg/kubelet/volumemanager/cache/actual_state_of_world.go
36672
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* Package cache implements data structures used by the kubelet volume manager to keep track of attached volumes and the pods that mounted them. */ package cache import ( "fmt" "sync" v1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/types" utilfeature "k8s.io/apiserver/pkg/util/feature" "k8s.io/klog/v2" "k8s.io/kubernetes/pkg/features" "k8s.io/kubernetes/pkg/volume" "k8s.io/kubernetes/pkg/volume/util" "k8s.io/kubernetes/pkg/volume/util/operationexecutor" volumetypes "k8s.io/kubernetes/pkg/volume/util/types" ) // ActualStateOfWorld defines a set of thread-safe operations for the kubelet // volume manager's actual state of the world cache. // This cache contains volumes->pods i.e. a set of all volumes attached to this // node and the pods that the manager believes have successfully mounted the // volume. // Note: This is distinct from the ActualStateOfWorld implemented by the // attach/detach controller. They both keep track of different objects. This // contains kubelet volume manager specific state. type ActualStateOfWorld interface { // ActualStateOfWorld must implement the methods required to allow // operationexecutor to interact with it. operationexecutor.ActualStateOfWorldMounterUpdater // ActualStateOfWorld must implement the methods required to allow // operationexecutor to interact with it. 
operationexecutor.ActualStateOfWorldAttacherUpdater // AddPodToVolume adds the given pod to the given volume in the cache // indicating the specified volume has been successfully mounted to the // specified pod. // If a pod with the same unique name already exists under the specified // volume, reset the pod's remountRequired value. // If a volume with the name volumeName does not exist in the list of // attached volumes, an error is returned. AddPodToVolume(operationexecutor.MarkVolumeOpts) error // MarkRemountRequired marks each volume that is successfully attached and // mounted for the specified pod as requiring remount (if the plugin for the // volume indicates it requires remounting on pod updates). Atomically // updating volumes depend on this to update the contents of the volume on // pod update. MarkRemountRequired(podName volumetypes.UniquePodName) // SetDeviceMountState sets device mount state for the given volume. When deviceMountState is set to DeviceGloballyMounted // then device is mounted at a global mount point. When it is set to DeviceMountUncertain then also it means volume // MAY be globally mounted at a global mount point. In both cases - the volume must be unmounted from // global mount point prior to detach. // If a volume with the name volumeName does not exist in the list of // attached volumes, an error is returned. SetDeviceMountState(volumeName v1.UniqueVolumeName, deviceMountState operationexecutor.DeviceMountState, devicePath, deviceMountPath string) error // DeletePodFromVolume removes the given pod from the given volume in the // cache indicating the volume has been successfully unmounted from the pod. // If a pod with the same unique name does not exist under the specified // volume, this is a no-op. // If a volume with the name volumeName does not exist in the list of // attached volumes, an error is returned. 
DeletePodFromVolume(podName volumetypes.UniquePodName, volumeName v1.UniqueVolumeName) error // DeleteVolume removes the given volume from the list of attached volumes // in the cache indicating the volume has been successfully detached from // this node. // If a volume with the name volumeName does not exist in the list of // attached volumes, this is a no-op. // If a volume with the name volumeName exists and its list of mountedPods // is not empty, an error is returned. DeleteVolume(volumeName v1.UniqueVolumeName) error // PodExistsInVolume returns true if the given pod exists in the list of // mountedPods for the given volume in the cache, indicating that the volume // is attached to this node and the pod has successfully mounted it. // If a pod with the same unique name does not exist under the specified // volume, false is returned. // If a volume with the name volumeName does not exist in the list of // attached volumes, a volumeNotAttachedError is returned indicating the // given volume is not yet attached. // If the given volumeName/podName combo exists but the value of // remountRequired is true, a remountRequiredError is returned indicating // the given volume has been successfully mounted to this pod but should be // remounted to reflect changes in the referencing pod. Atomically updating // volumes, depend on this to update the contents of the volume. // All volume mounting calls should be idempotent so a second mount call for // volumes that do not need to update contents should not fail. PodExistsInVolume(podName volumetypes.UniquePodName, volumeName v1.UniqueVolumeName) (bool, string, error) // PodRemovedFromVolume returns true if the given pod does not exist in the list of // mountedPods for the given volume in the cache, indicating that the pod has // fully unmounted it or it was never mounted the volume. 
// If the volume is fully mounted or is in uncertain mount state for the pod, it is // considered that the pod still exists in volume manager's actual state of the world // and false is returned. PodRemovedFromVolume(podName volumetypes.UniquePodName, volumeName v1.UniqueVolumeName) bool // VolumeExistsWithSpecName returns true if the given volume specified with the // volume spec name (a.k.a., InnerVolumeSpecName) exists in the list of // volumes that should be attached to this node. // If a pod with the same name does not exist under the specified // volume, false is returned. VolumeExistsWithSpecName(podName volumetypes.UniquePodName, volumeSpecName string) bool // VolumeExists returns true if the given volume exists in the list of // attached volumes in the cache, indicating the volume is attached to this // node. VolumeExists(volumeName v1.UniqueVolumeName) bool // GetMountedVolumes generates and returns a list of volumes and the pods // they are successfully attached and mounted for based on the current // actual state of the world. GetMountedVolumes() []MountedVolume // GetAllMountedVolumes returns list of all possibly mounted volumes including // those that are in VolumeMounted state and VolumeMountUncertain state. GetAllMountedVolumes() []MountedVolume // GetMountedVolumesForPod generates and returns a list of volumes that are // successfully attached and mounted for the specified pod based on the // current actual state of the world. GetMountedVolumesForPod(podName volumetypes.UniquePodName) []MountedVolume // GetPossiblyMountedVolumesForPod generates and returns a list of volumes for // the specified pod that either are attached and mounted or are "uncertain", // i.e. a volume plugin may be mounting the volume right now. GetPossiblyMountedVolumesForPod(podName volumetypes.UniquePodName) []MountedVolume // GetGloballyMountedVolumes generates and returns a list of all attached // volumes that are globally mounted. 
This list can be used to determine // which volumes should be reported as "in use" in the node's VolumesInUse // status field. Globally mounted here refers to the shared plugin mount // point for the attachable volume from which the pod specific mount points // are created (via bind mount). GetGloballyMountedVolumes() []AttachedVolume // GetUnmountedVolumes generates and returns a list of attached volumes that // have no mountedPods. This list can be used to determine which volumes are // no longer referenced and may be globally unmounted and detached. GetUnmountedVolumes() []AttachedVolume // MarkFSResizeRequired marks each volume that is successfully attached and // mounted for the specified pod as requiring file system resize (if the plugin for the // volume indicates it requires file system resize). MarkFSResizeRequired(volumeName v1.UniqueVolumeName, podName volumetypes.UniquePodName) // GetAttachedVolumes returns a list of volumes that is known to be attached // to the node. This list can be used to determine volumes that are either in-use // or have a mount/unmount operation pending. GetAttachedVolumes() []AttachedVolume } // MountedVolume represents a volume that has successfully been mounted to a pod. type MountedVolume struct { operationexecutor.MountedVolume } // AttachedVolume represents a volume that is attached to a node. type AttachedVolume struct { operationexecutor.AttachedVolume // DeviceMountState indicates if device has been globally mounted or is not. DeviceMountState operationexecutor.DeviceMountState } // DeviceMayBeMounted returns true if device is mounted in global path or is in // uncertain state. func (av AttachedVolume) DeviceMayBeMounted() bool { return av.DeviceMountState == operationexecutor.DeviceGloballyMounted || av.DeviceMountState == operationexecutor.DeviceMountUncertain } // NewActualStateOfWorld returns a new instance of ActualStateOfWorld. 
func NewActualStateOfWorld( nodeName types.NodeName, volumePluginMgr *volume.VolumePluginMgr) ActualStateOfWorld { return &actualStateOfWorld{ nodeName: nodeName, attachedVolumes: make(map[v1.UniqueVolumeName]attachedVolume), volumePluginMgr: volumePluginMgr, } } // IsVolumeNotAttachedError returns true if the specified error is a // volumeNotAttachedError. func IsVolumeNotAttachedError(err error) bool { _, ok := err.(volumeNotAttachedError) return ok } // IsRemountRequiredError returns true if the specified error is a // remountRequiredError. func IsRemountRequiredError(err error) bool { _, ok := err.(remountRequiredError) return ok } type actualStateOfWorld struct { // nodeName is the name of this node. This value is passed to Attach/Detach nodeName types.NodeName // attachedVolumes is a map containing the set of volumes the kubelet volume // manager believes to be successfully attached to this node. Volume types // that do not implement an attacher interface are assumed to be in this // state by default. // The key in this map is the name of the volume and the value is an object // containing more information about the attached volume. attachedVolumes map[v1.UniqueVolumeName]attachedVolume // volumePluginMgr is the volume plugin manager used to create volume // plugin objects. volumePluginMgr *volume.VolumePluginMgr sync.RWMutex } // attachedVolume represents a volume the kubelet volume manager believes to be // successfully attached to a node it is managing. Volume types that do not // implement an attacher are assumed to be in this state. type attachedVolume struct { // volumeName contains the unique identifier for this volume. volumeName v1.UniqueVolumeName // mountedPods is a map containing the set of pods that this volume has been // successfully mounted to. The key in this map is the name of the pod and // the value is a mountedPod object containing more information about the // pod. 
mountedPods map[volumetypes.UniquePodName]mountedPod // spec is the volume spec containing the specification for this volume. // Used to generate the volume plugin object, and passed to plugin methods. // In particular, the Unmount method uses spec.Name() as the volumeSpecName // in the mount path: // /var/lib/kubelet/pods/{podUID}/volumes/{escapeQualifiedPluginName}/{volumeSpecName}/ spec *volume.Spec // pluginName is the Unescaped Qualified name of the volume plugin used to // attach and mount this volume. It is stored separately in case the full // volume spec (everything except the name) can not be reconstructed for a // volume that should be unmounted (which would be the case for a mount path // read from disk without a full volume spec). pluginName string // pluginIsAttachable indicates the volume plugin used to attach and mount // this volume implements the volume.Attacher interface pluginIsAttachable bool // deviceMountState stores information that tells us if device is mounted // globally or not deviceMountState operationexecutor.DeviceMountState // devicePath contains the path on the node where the volume is attached for // attachable volumes devicePath string // deviceMountPath contains the path on the node where the device should // be mounted after it is attached. deviceMountPath string // volumeInUseErrorForExpansion indicates volume driver has previously returned volume-in-use error // for this volume and volume expansion on this node should not be retried volumeInUseErrorForExpansion bool } // The mountedPod object represents a pod for which the kubelet volume manager // believes the underlying volume has been successfully been mounted. 
type mountedPod struct {
    // the name of the pod
    podName volumetypes.UniquePodName

    // the UID of the pod
    podUID types.UID

    // mounter used to mount
    mounter volume.Mounter

    // mapper used to block volumes support
    blockVolumeMapper volume.BlockVolumeMapper

    // spec is the volume spec containing the specification for this volume.
    // Used to generate the volume plugin object, and passed to plugin methods.
    // In particular, the Unmount method uses spec.Name() as the volumeSpecName
    // in the mount path:
    // /var/lib/kubelet/pods/{podUID}/volumes/{escapeQualifiedPluginName}/{volumeSpecName}/
    volumeSpec *volume.Spec

    // outerVolumeSpecName is the volume.Spec.Name() of the volume as referenced
    // directly in the pod. If the volume was referenced through a persistent
    // volume claim, this contains the volume.Spec.Name() of the persistent
    // volume claim
    outerVolumeSpecName string

    // remountRequired indicates the underlying volume has been successfully
    // mounted to this pod but it should be remounted to reflect changes in the
    // referencing pod.
    // Atomically updating volumes depend on this to update the contents of the
    // volume. All volume mounting calls should be idempotent so a second mount
    // call for volumes that do not need to update contents should not fail.
    remountRequired bool

    // volumeGidValue contains the value of the GID annotation, if present.
    volumeGidValue string

    // fsResizeRequired indicates the underlying volume has been successfully
    // mounted to this pod but its size has been expanded after that.
    fsResizeRequired bool

    // volumeMountStateForPod stores state of volume mount for the pod. If it is:
    //   - VolumeMounted: means volume for pod has been successfully mounted
    //   - VolumeMountUncertain: means volume for pod may not be mounted, but it must be unmounted
    volumeMountStateForPod operationexecutor.VolumeMountState
}

// MarkVolumeAsAttached records the volume as attached by delegating to
// addVolume. The node name is ignored on the kubelet side.
func (asw *actualStateOfWorld) MarkVolumeAsAttached(
    volumeName v1.UniqueVolumeName, volumeSpec *volume.Spec, _ types.NodeName, devicePath string) error {
    return asw.addVolume(volumeName, volumeSpec, devicePath)
}

// MarkVolumeAsUncertain is a no-op on the kubelet side; it always returns nil.
func (asw *actualStateOfWorld) MarkVolumeAsUncertain(
    volumeName v1.UniqueVolumeName, volumeSpec *volume.Spec, _ types.NodeName) error {
    return nil
}

// MarkVolumeAsDetached removes the volume from the cache. The DeleteVolume
// error (e.g. volume still has mounted pods) is intentionally discarded here.
func (asw *actualStateOfWorld) MarkVolumeAsDetached(
    volumeName v1.UniqueVolumeName, nodeName types.NodeName) {
    asw.DeleteVolume(volumeName)
}

// MarkVolumeAsMounted records a pod mount by delegating to AddPodToVolume.
func (asw *actualStateOfWorld) MarkVolumeAsMounted(markVolumeOpts operationexecutor.MarkVolumeOpts) error {
    return asw.AddPodToVolume(markVolumeOpts)
}

func (asw *actualStateOfWorld) AddVolumeToReportAsAttached(volumeName v1.UniqueVolumeName, nodeName types.NodeName) {
    // no operation for kubelet side
}

func (asw *actualStateOfWorld) RemoveVolumeFromReportAsAttached(volumeName v1.UniqueVolumeName, nodeName types.NodeName) error {
    // no operation for kubelet side
    return nil
}

// MarkVolumeAsUnmounted removes the pod from the volume's mountedPods map by
// delegating to DeletePodFromVolume.
func (asw *actualStateOfWorld) MarkVolumeAsUnmounted(
    podName volumetypes.UniquePodName, volumeName v1.UniqueVolumeName) error {
    return asw.DeletePodFromVolume(podName, volumeName)
}

// MarkDeviceAsMounted records the device as globally mounted along with its
// device path and global mount path.
func (asw *actualStateOfWorld) MarkDeviceAsMounted(
    volumeName v1.UniqueVolumeName, devicePath, deviceMountPath string) error {
    return asw.SetDeviceMountState(volumeName, operationexecutor.DeviceGloballyMounted, devicePath, deviceMountPath)
}

// MarkDeviceAsUncertain records the device mount as uncertain (mount may or
// may not have succeeded; it must be cleaned up).
func (asw *actualStateOfWorld) MarkDeviceAsUncertain(
    volumeName v1.UniqueVolumeName, devicePath, deviceMountPath string) error {
    return asw.SetDeviceMountState(volumeName, operationexecutor.DeviceMountUncertain, devicePath, deviceMountPath)
}

// MarkVolumeMountAsUncertain forces the mount state in the options to
// VolumeMountUncertain before recording the pod mount.
func (asw *actualStateOfWorld) MarkVolumeMountAsUncertain(markVolumeOpts operationexecutor.MarkVolumeOpts) error {
    markVolumeOpts.VolumeMountState = operationexecutor.VolumeMountUncertain
    return asw.AddPodToVolume(markVolumeOpts)
}

// MarkDeviceAsUnmounted resets the device mount state and clears the recorded
// device/global-mount paths.
func (asw *actualStateOfWorld) MarkDeviceAsUnmounted(
    volumeName v1.UniqueVolumeName) error {
    return asw.SetDeviceMountState(volumeName, operationexecutor.DeviceNotMounted, "", "")
}

// GetDeviceMountState returns the recorded device mount state for the volume,
// or DeviceNotMounted when the volume is not in the cache.
func (asw *actualStateOfWorld) GetDeviceMountState(volumeName v1.UniqueVolumeName) operationexecutor.DeviceMountState {
    asw.RLock()
    defer asw.RUnlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return operationexecutor.DeviceNotMounted
    }

    return volumeObj.deviceMountState
}

// MarkForInUseExpansionError flags the volume so that in-use expansion is not
// retried on this node. No-op if the volume is not in the cache.
func (asw *actualStateOfWorld) MarkForInUseExpansionError(volumeName v1.UniqueVolumeName) {
    asw.Lock()
    defer asw.Unlock()

    volumeObj, ok := asw.attachedVolumes[volumeName]
    if ok {
        // attachedVolume is stored by value, so write the modified copy back.
        volumeObj.volumeInUseErrorForExpansion = true
        asw.attachedVolumes[volumeName] = volumeObj
    }
}

// GetVolumeMountState returns the mount state for the given pod/volume pair,
// or VolumeNotMounted when either is missing from the cache.
func (asw *actualStateOfWorld) GetVolumeMountState(volumeName v1.UniqueVolumeName, podName volumetypes.UniquePodName) operationexecutor.VolumeMountState {
    asw.RLock()
    defer asw.RUnlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return operationexecutor.VolumeNotMounted
    }

    podObj, podExists := volumeObj.mountedPods[podName]
    if !podExists {
        return operationexecutor.VolumeNotMounted
    }
    return podObj.volumeMountStateForPod
}

// IsVolumeMountedElsewhere reports whether the volume is mounted (or in an
// uncertain mount state) by any pod other than the given one.
func (asw *actualStateOfWorld) IsVolumeMountedElsewhere(volumeName v1.UniqueVolumeName, podName volumetypes.UniquePodName) bool {
    asw.RLock()
    defer asw.RUnlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return false
    }

    for _, podObj := range volumeObj.mountedPods {
        if podName != podObj.podName {
            // Treat uncertain mount state as mounted until certain.
            if podObj.volumeMountStateForPod != operationexecutor.VolumeNotMounted {
                return true
            }
        }
    }
    return false
}

// addVolume adds the given volume to the cache indicating the specified
// volume is attached to this node.
// If no volume name is supplied, a unique
// volume name is generated from the volumeSpec and returned on success. If a
// volume with the same generated name already exists, this is a noop. If no
// volume plugin can support the given volumeSpec or more than one plugin can
// support it, an error is returned.
func (asw *actualStateOfWorld) addVolume(
    volumeName v1.UniqueVolumeName, volumeSpec *volume.Spec, devicePath string) error {
    asw.Lock()
    defer asw.Unlock()

    volumePlugin, err := asw.volumePluginMgr.FindPluginBySpec(volumeSpec)
    if err != nil || volumePlugin == nil {
        return fmt.Errorf(
            "failed to get Plugin from volumeSpec for volume %q err=%v",
            volumeSpec.Name(),
            err)
    }

    if len(volumeName) == 0 {
        // Derive the unique volume name from the spec when the caller did not
        // provide one.
        volumeName, err = util.GetUniqueVolumeNameFromSpec(volumePlugin, volumeSpec)
        if err != nil {
            return fmt.Errorf(
                "failed to GetUniqueVolumeNameFromSpec for volumeSpec %q using volume plugin %q err=%v",
                volumeSpec.Name(),
                volumePlugin.GetPluginName(),
                err)
        }
    }

    pluginIsAttachable := false
    if attachablePlugin, err := asw.volumePluginMgr.FindAttachablePluginBySpec(volumeSpec); err == nil && attachablePlugin != nil {
        pluginIsAttachable = true
    }

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        volumeObj = attachedVolume{
            volumeName:         volumeName,
            spec:               volumeSpec,
            mountedPods:        make(map[volumetypes.UniquePodName]mountedPod),
            pluginName:         volumePlugin.GetPluginName(),
            pluginIsAttachable: pluginIsAttachable,
            deviceMountState:   operationexecutor.DeviceNotMounted,
            devicePath:         devicePath,
        }
    } else {
        // If volume object already exists, update the fields such as device path
        volumeObj.devicePath = devicePath
        klog.V(2).InfoS("Volume is already added to attachedVolume list, update device path", "volumeName", volumeName, "path", devicePath)
    }
    asw.attachedVolumes[volumeName] = volumeObj

    return nil
}

// AddPodToVolume records that the given pod has mounted the given volume.
// Returns an error if the volume is not in the attached-volumes cache. If the
// pod entry already exists it is updated in place (remountRequired is reset
// and the mount state refreshed).
func (asw *actualStateOfWorld) AddPodToVolume(markVolumeOpts operationexecutor.MarkVolumeOpts) error {
    podName := markVolumeOpts.PodName
    podUID := markVolumeOpts.PodUID
    volumeName := markVolumeOpts.VolumeName
    mounter := markVolumeOpts.Mounter
    blockVolumeMapper := markVolumeOpts.BlockVolumeMapper
    outerVolumeSpecName := markVolumeOpts.OuterVolumeSpecName
    volumeGidValue := markVolumeOpts.VolumeGidVolume
    volumeSpec := markVolumeOpts.VolumeSpec
    asw.Lock()
    defer asw.Unlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return fmt.Errorf(
            "no volume with the name %q exists in the list of attached volumes",
            volumeName)
    }

    podObj, podExists := volumeObj.mountedPods[podName]
    if !podExists {
        podObj = mountedPod{
            podName:                podName,
            podUID:                 podUID,
            mounter:                mounter,
            blockVolumeMapper:      blockVolumeMapper,
            outerVolumeSpecName:    outerVolumeSpecName,
            volumeGidValue:         volumeGidValue,
            volumeSpec:             volumeSpec,
            volumeMountStateForPod: markVolumeOpts.VolumeMountState,
        }
    }

    // If pod exists, reset remountRequired value
    podObj.remountRequired = false
    podObj.volumeMountStateForPod = markVolumeOpts.VolumeMountState
    if mounter != nil {
        // The mounter stored in the object may have old information,
        // use the newest one.
        podObj.mounter = mounter
    }
    asw.attachedVolumes[volumeName].mountedPods[podName] = podObj
    return nil
}

// MarkVolumeAsResized clears the fsResizeRequired flag for the pod/volume
// pair, indicating the file system resize has completed.
func (asw *actualStateOfWorld) MarkVolumeAsResized(
    podName volumetypes.UniquePodName,
    volumeName v1.UniqueVolumeName) error {
    asw.Lock()
    defer asw.Unlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return fmt.Errorf(
            "no volume with the name %q exists in the list of attached volumes",
            volumeName)
    }

    podObj, podExists := volumeObj.mountedPods[podName]
    if !podExists {
        return fmt.Errorf(
            "no pod with the name %q exists in the mounted pods list of volume %s",
            podName,
            volumeName)
    }

    klog.V(5).InfoS("Pod volume has been resized", "uniquePodName", podName, "volumeName", volumeName, "outerVolumeSpecName", podObj.outerVolumeSpecName)
    podObj.fsResizeRequired = false
    asw.attachedVolumes[volumeName].mountedPods[podName] = podObj
    return nil
}

// MarkRemountRequired flags every volume mounted by the given pod whose plugin
// reports RequiresRemount, so the reconciler will remount it.
func (asw *actualStateOfWorld) MarkRemountRequired(
    podName volumetypes.UniquePodName) {
    asw.Lock()
    defer asw.Unlock()
    for volumeName, volumeObj := range asw.attachedVolumes {
        if podObj, podExists := volumeObj.mountedPods[podName]; podExists {
            volumePlugin, err := asw.volumePluginMgr.FindPluginBySpec(podObj.volumeSpec)
            if err != nil || volumePlugin == nil {
                // Log and continue processing
                klog.ErrorS(nil, "MarkRemountRequired failed to FindPluginBySpec for volume", "uniquePodName", podObj.podName, "podUID", podObj.podUID, "volumeName", volumeName, "volumeSpecName", podObj.volumeSpec.Name())
                continue
            }

            if volumePlugin.RequiresRemount(podObj.volumeSpec) {
                podObj.remountRequired = true
                asw.attachedVolumes[volumeName].mountedPods[podName] = podObj
            }
        }
    }
}

// MarkFSResizeRequired flags the pod/volume pair as needing a file system
// resize, if the volume's plugin is node-expandable and requires one.
func (asw *actualStateOfWorld) MarkFSResizeRequired(
    volumeName v1.UniqueVolumeName,
    podName volumetypes.UniquePodName) {
    asw.Lock()
    defer asw.Unlock()
    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        klog.InfoS("MarkFSResizeRequired for volume failed as volume does not exist", "volumeName", volumeName)
        return
    }

    podObj, podExists := volumeObj.mountedPods[podName]
    if !podExists {
        klog.InfoS("MarkFSResizeRequired for volume failed because the pod does not exist", "uniquePodName", podName, "volumeName", volumeName)
        return
    }

    volumePlugin, err := asw.volumePluginMgr.FindNodeExpandablePluginBySpec(podObj.volumeSpec)
    if err != nil || volumePlugin == nil {
        // Log and continue processing
        klog.ErrorS(nil, "MarkFSResizeRequired failed to find expandable plugin for volume", "uniquePodName", podObj.podName, "volumeName", volumeObj.volumeName, "volumeSpecName", podObj.volumeSpec.Name())
        return
    }

    if volumePlugin.RequiresFSResize() {
        if !podObj.fsResizeRequired {
            klog.V(3).InfoS("PVC volume of the pod requires file system resize", "uniquePodName", podName, "volumeName", volumeName, "outerVolumeSpecName", podObj.outerVolumeSpecName)
            podObj.fsResizeRequired = true
        }
        asw.attachedVolumes[volumeName].mountedPods[podName] = podObj
    }
}

// SetDeviceMountState updates the device mount state and global mount path of
// the volume. The recorded device path is only overwritten when a non-empty
// devicePath is supplied.
func (asw *actualStateOfWorld) SetDeviceMountState(
    volumeName v1.UniqueVolumeName, deviceMountState operationexecutor.DeviceMountState, devicePath, deviceMountPath string) error {
    asw.Lock()
    defer asw.Unlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return fmt.Errorf(
            "no volume with the name %q exists in the list of attached volumes",
            volumeName)
    }

    volumeObj.deviceMountState = deviceMountState
    volumeObj.deviceMountPath = deviceMountPath
    if devicePath != "" {
        volumeObj.devicePath = devicePath
    }
    asw.attachedVolumes[volumeName] = volumeObj
    return nil
}

// DeletePodFromVolume removes the pod from the volume's mountedPods map.
// Returns an error only when the volume itself is not in the cache; a missing
// pod entry is a no-op.
func (asw *actualStateOfWorld) DeletePodFromVolume(
    podName volumetypes.UniquePodName, volumeName v1.UniqueVolumeName) error {
    asw.Lock()
    defer asw.Unlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return fmt.Errorf(
            "no volume with the name %q exists in the list of attached volumes",
            volumeName)
    }

    _, podExists := volumeObj.mountedPods[podName]
    if podExists {
        delete(asw.attachedVolumes[volumeName].mountedPods, podName)
    }

    return nil
}

// DeleteVolume removes the volume from the cache. It is a no-op when the
// volume is absent, and an error when the volume still has mounted pods.
func (asw *actualStateOfWorld) DeleteVolume(volumeName v1.UniqueVolumeName) error {
    asw.Lock()
    defer asw.Unlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return nil
    }

    if len(volumeObj.mountedPods) != 0 {
        return fmt.Errorf(
            "failed to DeleteVolume %q, it still has %v mountedPods",
            volumeName,
            len(volumeObj.mountedPods))
    }

    delete(asw.attachedVolumes, volumeName)
    return nil
}

// PodExistsInVolume reports whether the pod has the volume mounted. It returns
// volumeNotAttachedError when the volume is not attached, remountRequiredError
// when a remount is pending, and fsResizeRequiredError when an in-use file
// system expansion is pending (feature-gated). An uncertain mount state is
// reported as not mounted so the caller keeps retrying the mount.
func (asw *actualStateOfWorld) PodExistsInVolume(
    podName volumetypes.UniquePodName,
    volumeName v1.UniqueVolumeName) (bool, string, error) {
    asw.RLock()
    defer asw.RUnlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return false, "", newVolumeNotAttachedError(volumeName)
    }

    podObj, podExists := volumeObj.mountedPods[podName]
    if podExists {
        // if volume mount was uncertain we should keep trying to mount the volume
        if podObj.volumeMountStateForPod == operationexecutor.VolumeMountUncertain {
            return false, volumeObj.devicePath, nil
        }
        if podObj.remountRequired {
            return true, volumeObj.devicePath, newRemountRequiredError(volumeObj.volumeName, podObj.podName)
        }
        if podObj.fsResizeRequired &&
            !volumeObj.volumeInUseErrorForExpansion &&
            utilfeature.DefaultFeatureGate.Enabled(features.ExpandInUsePersistentVolumes) {
            return true, volumeObj.devicePath, newFsResizeRequiredError(volumeObj.volumeName, podObj.podName)
        }
    }

    return podExists, volumeObj.devicePath, nil
}

// PodRemovedFromVolume reports whether the pod is fully removed from the
// volume: true when the volume or pod entry is absent, false while the mount
// state is VolumeMounted or VolumeMountUncertain (unmount still needed).
func (asw *actualStateOfWorld) PodRemovedFromVolume(
    podName volumetypes.UniquePodName,
    volumeName v1.UniqueVolumeName) bool {
    asw.RLock()
    defer asw.RUnlock()

    volumeObj, volumeExists := asw.attachedVolumes[volumeName]
    if !volumeExists {
        return true
    }

    podObj, podExists := volumeObj.mountedPods[podName]
    if podExists {
        // if volume mount was uncertain we should keep trying to unmount the volume
        if podObj.volumeMountStateForPod == operationexecutor.VolumeMountUncertain {
            return false
        }
        if podObj.volumeMountStateForPod == operationexecutor.VolumeMounted {
            return false
        }
    }
    return true
}

// VolumeExistsWithSpecName reports whether the pod has any mounted volume
// whose inner spec name matches volumeSpecName.
func (asw *actualStateOfWorld) VolumeExistsWithSpecName(podName volumetypes.UniquePodName, volumeSpecName string) bool {
    asw.RLock()
    defer asw.RUnlock()
    for _, volumeObj := range asw.attachedVolumes {
        if podObj, podExists := volumeObj.mountedPods[podName]; podExists {
            if podObj.volumeSpec.Name() == volumeSpecName {
                return true
            }
        }
    }
    return false
}

// VolumeExists reports whether the volume is in the attached-volumes cache.
func (asw *actualStateOfWorld) VolumeExists(
    volumeName v1.UniqueVolumeName) bool {
    asw.RLock()
    defer asw.RUnlock()

    _, volumeExists := asw.attachedVolumes[volumeName]
    return volumeExists
}

// GetMountedVolumes returns all volume/pod pairs whose mount state is
// VolumeMounted (uncertain mounts are excluded).
func (asw *actualStateOfWorld) GetMountedVolumes() []MountedVolume {
    asw.RLock()
    defer asw.RUnlock()
    mountedVolume := make([]MountedVolume, 0 /* len */, len(asw.attachedVolumes) /* cap */)
    for _, volumeObj := range asw.attachedVolumes {
        for _, podObj := range volumeObj.mountedPods {
            if podObj.volumeMountStateForPod == operationexecutor.VolumeMounted {
                mountedVolume = append(
                    mountedVolume,
                    getMountedVolume(&podObj, &volumeObj))
            }
        }
    }
    return mountedVolume
}

// GetAllMountedVolumes returns all volumes which could be locally mounted for a pod.
func (asw *actualStateOfWorld) GetAllMountedVolumes() []MountedVolume { asw.RLock() defer asw.RUnlock() mountedVolume := make([]MountedVolume, 0 /* len */, len(asw.attachedVolumes) /* cap */) for _, volumeObj := range asw.attachedVolumes { for _, podObj := range volumeObj.mountedPods { if podObj.volumeMountStateForPod == operationexecutor.VolumeMounted || podObj.volumeMountStateForPod == operationexecutor.VolumeMountUncertain { mountedVolume = append( mountedVolume, getMountedVolume(&podObj, &volumeObj)) } } } return mountedVolume } func (asw *actualStateOfWorld) GetMountedVolumesForPod( podName volumetypes.UniquePodName) []MountedVolume { asw.RLock() defer asw.RUnlock() mountedVolume := make([]MountedVolume, 0 /* len */, len(asw.attachedVolumes) /* cap */) for _, volumeObj := range asw.attachedVolumes { for mountedPodName, podObj := range volumeObj.mountedPods { if mountedPodName == podName && podObj.volumeMountStateForPod == operationexecutor.VolumeMounted { mountedVolume = append( mountedVolume, getMountedVolume(&podObj, &volumeObj)) } } } return mountedVolume } func (asw *actualStateOfWorld) GetPossiblyMountedVolumesForPod( podName volumetypes.UniquePodName) []MountedVolume { asw.RLock() defer asw.RUnlock() mountedVolume := make([]MountedVolume, 0 /* len */, len(asw.attachedVolumes) /* cap */) for _, volumeObj := range asw.attachedVolumes { for mountedPodName, podObj := range volumeObj.mountedPods { if mountedPodName == podName && (podObj.volumeMountStateForPod == operationexecutor.VolumeMounted || podObj.volumeMountStateForPod == operationexecutor.VolumeMountUncertain) { mountedVolume = append( mountedVolume, getMountedVolume(&podObj, &volumeObj)) } } } return mountedVolume } func (asw *actualStateOfWorld) GetGloballyMountedVolumes() []AttachedVolume { asw.RLock() defer asw.RUnlock() globallyMountedVolumes := make( []AttachedVolume, 0 /* len */, len(asw.attachedVolumes) /* cap */) for _, volumeObj := range asw.attachedVolumes { if volumeObj.deviceMountState == 
operationexecutor.DeviceGloballyMounted { globallyMountedVolumes = append( globallyMountedVolumes, asw.newAttachedVolume(&volumeObj)) } } return globallyMountedVolumes } func (asw *actualStateOfWorld) GetAttachedVolumes() []AttachedVolume { asw.RLock() defer asw.RUnlock() allAttachedVolumes := make( []AttachedVolume, 0 /* len */, len(asw.attachedVolumes) /* cap */) for _, volumeObj := range asw.attachedVolumes { allAttachedVolumes = append( allAttachedVolumes, asw.newAttachedVolume(&volumeObj)) } return allAttachedVolumes } func (asw *actualStateOfWorld) GetUnmountedVolumes() []AttachedVolume { asw.RLock() defer asw.RUnlock() unmountedVolumes := make([]AttachedVolume, 0 /* len */, len(asw.attachedVolumes) /* cap */) for _, volumeObj := range asw.attachedVolumes { if len(volumeObj.mountedPods) == 0 { unmountedVolumes = append( unmountedVolumes, asw.newAttachedVolume(&volumeObj)) } } return unmountedVolumes } func (asw *actualStateOfWorld) newAttachedVolume( attachedVolume *attachedVolume) AttachedVolume { return AttachedVolume{ AttachedVolume: operationexecutor.AttachedVolume{ VolumeName: attachedVolume.volumeName, VolumeSpec: attachedVolume.spec, NodeName: asw.nodeName, PluginIsAttachable: attachedVolume.pluginIsAttachable, DevicePath: attachedVolume.devicePath, DeviceMountPath: attachedVolume.deviceMountPath, PluginName: attachedVolume.pluginName}, DeviceMountState: attachedVolume.deviceMountState, } } // Compile-time check to ensure volumeNotAttachedError implements the error interface var _ error = volumeNotAttachedError{} // volumeNotAttachedError is an error returned when PodExistsInVolume() fails to // find specified volume in the list of attached volumes. 
type volumeNotAttachedError struct { volumeName v1.UniqueVolumeName } func (err volumeNotAttachedError) Error() string { return fmt.Sprintf( "volumeName %q does not exist in the list of attached volumes", err.volumeName) } func newVolumeNotAttachedError(volumeName v1.UniqueVolumeName) error { return volumeNotAttachedError{ volumeName: volumeName, } } // Compile-time check to ensure remountRequiredError implements the error interface var _ error = remountRequiredError{} // remountRequiredError is an error returned when PodExistsInVolume() found // volume/pod attached/mounted but remountRequired was true, indicating the // given volume should be remounted to the pod to reflect changes in the // referencing pod. type remountRequiredError struct { volumeName v1.UniqueVolumeName podName volumetypes.UniquePodName } func (err remountRequiredError) Error() string { return fmt.Sprintf( "volumeName %q is mounted to %q but should be remounted", err.volumeName, err.podName) } func newRemountRequiredError( volumeName v1.UniqueVolumeName, podName volumetypes.UniquePodName) error { return remountRequiredError{ volumeName: volumeName, podName: podName, } } // fsResizeRequiredError is an error returned when PodExistsInVolume() found // volume/pod attached/mounted but fsResizeRequired was true, indicating the // given volume receives an resize request after attached/mounted. type fsResizeRequiredError struct { volumeName v1.UniqueVolumeName podName volumetypes.UniquePodName } func (err fsResizeRequiredError) Error() string { return fmt.Sprintf( "volumeName %q mounted to %q needs to resize file system", err.volumeName, err.podName) } func newFsResizeRequiredError( volumeName v1.UniqueVolumeName, podName volumetypes.UniquePodName) error { return fsResizeRequiredError{ volumeName: volumeName, podName: podName, } } // IsFSResizeRequiredError returns true if the specified error is a // fsResizeRequiredError. 
func IsFSResizeRequiredError(err error) bool { _, ok := err.(fsResizeRequiredError) return ok } // getMountedVolume constructs and returns a MountedVolume object from the given // mountedPod and attachedVolume objects. func getMountedVolume( mountedPod *mountedPod, attachedVolume *attachedVolume) MountedVolume { return MountedVolume{ MountedVolume: operationexecutor.MountedVolume{ PodName: mountedPod.podName, VolumeName: attachedVolume.volumeName, InnerVolumeSpecName: mountedPod.volumeSpec.Name(), OuterVolumeSpecName: mountedPod.outerVolumeSpecName, PluginName: attachedVolume.pluginName, PodUID: mountedPod.podUID, Mounter: mountedPod.mounter, BlockVolumeMapper: mountedPod.blockVolumeMapper, VolumeGidValue: mountedPod.volumeGidValue, VolumeSpec: mountedPod.volumeSpec, DeviceMountPath: attachedVolume.deviceMountPath}} }
apache-2.0
radiumray/Mixly_Arduino
mixly_arduino/blockly/msg/js/sv.js
31095
// This file was automatically generated. Do not modify. 'use strict'; goog.provide('Blockly.Msg.sv'); goog.require('Blockly.Msg'); Blockly.Msg.ADD_COMMENT = "Lägg till kommentar"; Blockly.Msg.AUTH = "Var god godkänn denna app för att du ska kunna spara och dela den."; Blockly.Msg.CHANGE_VALUE_TITLE = "Ändra värde:"; Blockly.Msg.CHAT = "Chatta med din medarbetare genom att skriva i detta fält."; Blockly.Msg.COLLAPSE_ALL = "Fäll ihop block"; Blockly.Msg.COLLAPSE_BLOCK = "Fäll ihop block"; Blockly.Msg.COLOUR_BLEND_COLOUR1 = "färg 1"; Blockly.Msg.COLOUR_BLEND_COLOUR2 = "färg 2"; Blockly.Msg.COLOUR_BLEND_HELPURL = "http://meyerweb.com/eric/tools/color-blend/"; Blockly.Msg.COLOUR_BLEND_RATIO = "förhållande"; Blockly.Msg.COLOUR_BLEND_TITLE = "blanda"; Blockly.Msg.COLOUR_BLEND_TOOLTIP = "Blandar ihop två färger med ett bestämt förhållande (0.0 - 1.0)."; Blockly.Msg.COLOUR_PICKER_HELPURL = "https://sv.wikipedia.org/wiki/Färg"; Blockly.Msg.COLOUR_PICKER_TOOLTIP = "Välj en färg från paletten."; Blockly.Msg.COLOUR_RANDOM_HELPURL = "http://randomcolour.com"; // untranslated Blockly.Msg.COLOUR_RANDOM_TITLE = "slumpfärg"; Blockly.Msg.COLOUR_RANDOM_TOOLTIP = "Slumpa fram en färg."; Blockly.Msg.COLOUR_RGB_BLUE = "blå"; Blockly.Msg.COLOUR_RGB_GREEN = "grön"; Blockly.Msg.COLOUR_RGB_HELPURL = "http://www.december.com/html/spec/colorper.html"; Blockly.Msg.COLOUR_RGB_RED = "röd"; Blockly.Msg.COLOUR_RGB_TITLE = "färg med"; Blockly.Msg.COLOUR_RGB_TOOLTIP = "Skapa en färg med det angivna mängden röd, grön och blå. 
Alla värden måste vara mellan 0 och 100."; Blockly.Msg.CONTROLS_FLOW_STATEMENTS_HELPURL = "https://github.com/google/blockly/wiki/Loops#loop-termination-blocks"; // untranslated Blockly.Msg.CONTROLS_FLOW_STATEMENTS_OPERATOR_BREAK = "bryt ut ur loop"; Blockly.Msg.CONTROLS_FLOW_STATEMENTS_OPERATOR_CONTINUE = "fortsätta med nästa upprepning av loop"; Blockly.Msg.CONTROLS_FLOW_STATEMENTS_TOOLTIP_BREAK = "Bryta ut ur den innehållande upprepningen."; Blockly.Msg.CONTROLS_FLOW_STATEMENTS_TOOLTIP_CONTINUE = "Hoppa över resten av denna loop och fortsätt med nästa loop."; Blockly.Msg.CONTROLS_FLOW_STATEMENTS_WARNING = "Varning: Detta block kan endast användas i en loop."; Blockly.Msg.CONTROLS_FOREACH_HELPURL = "https://github.com/google/blockly/wiki/Loops#for-each för varje-block"; Blockly.Msg.CONTROLS_FOREACH_INPUT_INLIST = "i listan"; Blockly.Msg.CONTROLS_FOREACH_INPUT_INLIST_TAIL = ""; // untranslated Blockly.Msg.CONTROLS_FOREACH_INPUT_ITEM = "för varje föremål"; Blockly.Msg.CONTROLS_FOREACH_TOOLTIP = "För varje objekt i en lista, ange variabeln '%1' till objektet, och utför sedan några kommandon."; Blockly.Msg.CONTROLS_FOR_HELPURL = "https://github.com/google/blockly/wiki/Loops#count-with"; // untranslated Blockly.Msg.CONTROLS_FOR_INPUT_FROM_TO = "from %1 to %2"; // untranslated Blockly.Msg.CONTROLS_FOR_INPUT_FROM_TO_BY = "från %1 till %2 med %3"; Blockly.Msg.CONTROLS_FOR_INPUT_WITH = "räkna med"; Blockly.Msg.CONTROLS_FOR_TOOLTIP = "Låt variabeln %1 ta värden från starttalet till sluttalet, beräknat med det angivna intervallet, och utför de angivna blocken."; Blockly.Msg.CONTROLS_IF_ELSEIF_TOOLTIP = "Lägg till ett villkor blocket \"om\"."; Blockly.Msg.CONTROLS_IF_ELSE_TOOLTIP = "Lägg till ett sista villkor som täcker alla alternativ som är kvar för \"if\"-blocket."; Blockly.Msg.CONTROLS_IF_HELPURL = "https://github.com/google/blockly/wiki/IfElse"; // untranslated Blockly.Msg.CONTROLS_IF_IF_TOOLTIP = "Lägg till, ta bort eller ändra ordningen för sektioner för att 
omkonfigurera blocket \"om\"."; Blockly.Msg.CONTROLS_IF_MSG_ELSE = "annars"; Blockly.Msg.CONTROLS_IF_MSG_ELSEIF = "annars om"; Blockly.Msg.CONTROLS_IF_MSG_IF = "om"; Blockly.Msg.CONTROLS_IF_TOOLTIP_1 = "Om ett värde är sant, utför några kommandon."; Blockly.Msg.CONTROLS_IF_TOOLTIP_2 = "Om värdet är sant, utför det första kommandoblocket. Annars utför det andra kommandoblocket."; Blockly.Msg.CONTROLS_IF_TOOLTIP_3 = "Om det första värdet är sant, utför det första kommandoblocket. Annars, om det andra värdet är sant, utför det andra kommandoblocket."; Blockly.Msg.CONTROLS_IF_TOOLTIP_4 = "Om det första värdet är sant, utför det första kommandoblocket. Annars, om det andra värdet är sant, utför det andra kommandoblocket. Om ingen av värdena är sanna, utför det sista kommandoblocket."; Blockly.Msg.CONTROLS_REPEAT_HELPURL = "https://en.wikipedia.org/wiki/For_loop"; Blockly.Msg.CONTROLS_REPEAT_INPUT_DO = "utför"; Blockly.Msg.CONTROLS_REPEAT_TITLE = "upprepa %1 gånger"; Blockly.Msg.CONTROLS_REPEAT_TITLE_REPEAT = "upprepa"; Blockly.Msg.CONTROLS_REPEAT_TITLE_TIMES = "gånger"; Blockly.Msg.CONTROLS_REPEAT_TOOLTIP = "Utför några kommandon flera gånger."; Blockly.Msg.CONTROLS_WHILEUNTIL_HELPURL = "https://github.com/google/blockly/wiki/Loops#repeat"; // untranslated Blockly.Msg.CONTROLS_WHILEUNTIL_OPERATOR_UNTIL = "upprepa tills"; Blockly.Msg.CONTROLS_WHILEUNTIL_OPERATOR_WHILE = "upprepa medan"; Blockly.Msg.CONTROLS_WHILEUNTIL_TOOLTIP_UNTIL = "Medan ett värde är falskt, utför några kommandon."; Blockly.Msg.CONTROLS_WHILEUNTIL_TOOLTIP_WHILE = "Medan ett värde är sant, utför några kommandon."; Blockly.Msg.DELETE_BLOCK = "Radera block"; Blockly.Msg.DELETE_X_BLOCKS = "Radera %1 block"; Blockly.Msg.DISABLE_BLOCK = "Inaktivera block"; Blockly.Msg.DUPLICATE_BLOCK = "Duplicera"; Blockly.Msg.ENABLE_BLOCK = "Aktivera block"; Blockly.Msg.EXPAND_ALL = "Fäll ut block"; Blockly.Msg.EXPAND_BLOCK = "Fäll ut block"; Blockly.Msg.EXTERNAL_INPUTS = "Externa inmatningar"; Blockly.Msg.HELP = "Hjälp"; 
Blockly.Msg.INLINE_INPUTS = "Radinmatning"; Blockly.Msg.LISTS_CREATE_EMPTY_HELPURL = "https://en.wikipedia.org/wiki/Linked_list#Empty_lists"; Blockly.Msg.LISTS_CREATE_EMPTY_TITLE = "skapa tom lista"; Blockly.Msg.LISTS_CREATE_EMPTY_TOOLTIP = "Ger tillbaka en lista utan någon data, alltså med längden 0"; Blockly.Msg.LISTS_CREATE_WITH_CONTAINER_TITLE_ADD = "lista"; Blockly.Msg.LISTS_CREATE_WITH_CONTAINER_TOOLTIP = "Lägg till, ta bort eller ändra ordningen på objekten för att göra om det här \"list\"-blocket."; Blockly.Msg.LISTS_CREATE_WITH_HELPURL = "https://github.com/google/blockly/wiki/Lists#create-list-with"; Blockly.Msg.LISTS_CREATE_WITH_INPUT_WITH = "skapa lista med"; Blockly.Msg.LISTS_CREATE_WITH_ITEM_TOOLTIP = "Lägg till ett föremål till listan."; Blockly.Msg.LISTS_CREATE_WITH_TOOLTIP = "Skapa en lista med valfritt antal föremål."; Blockly.Msg.LISTS_GET_INDEX_FIRST = "första"; Blockly.Msg.LISTS_GET_INDEX_FROM_END = "# från slutet"; Blockly.Msg.LISTS_GET_INDEX_FROM_START = "#"; Blockly.Msg.LISTS_GET_INDEX_GET = "hämta"; Blockly.Msg.LISTS_GET_INDEX_GET_REMOVE = "hämta och ta bort"; Blockly.Msg.LISTS_GET_INDEX_LAST = "sista"; Blockly.Msg.LISTS_GET_INDEX_RANDOM = "slumpad"; Blockly.Msg.LISTS_GET_INDEX_REMOVE = "ta bort"; Blockly.Msg.LISTS_GET_INDEX_TAIL = ""; // untranslated Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_FIRST = "Returnerar det första objektet i en lista."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_FROM_END = "Ger tillbaka objektet på den efterfrågade positionen i en lista. #1 är det sista objektet."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_FROM_START = "Ger tillbaka objektet på den efterfrågade positionen i en lista. 
#1 är det första objektet."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_LAST = "Returnerar det sista objektet i en lista."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_RANDOM = "Returnerar ett slumpmässigt objekt i en lista."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FIRST = "Tar bort och återställer det första objektet i en lista."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FROM_END = "Tar bort och återställer objektet på den specificerade positionen i en lista. #1 är det sista objektet."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FROM_START = "Tar bort och återställer objektet på den specificerade positionen i en lista. #1 är det första objektet."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_LAST = "Tar bort och återställer det sista objektet i en lista."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_RANDOM = "Tar bort och återställer ett slumpmässigt objekt i en lista."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_FIRST = "Tar bort det första objektet i en lista."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_FROM_END = "Tar bort objektet på den efterfrågade positionen i en lista. #1 är det sista objektet."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_FROM_START = "Tar bort objektet på den specificerade positionen i en lista. 
#1 är det första objektet."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_LAST = "Tar bort det sista objektet i en lista."; Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_RANDOM = "Tar bort en slumpmässig post i en lista."; Blockly.Msg.LISTS_GET_SUBLIST_END_FROM_END = "till # från slutet"; Blockly.Msg.LISTS_GET_SUBLIST_END_FROM_START = "till #"; Blockly.Msg.LISTS_GET_SUBLIST_END_LAST = "till sista"; Blockly.Msg.LISTS_GET_SUBLIST_HELPURL = "https://github.com/google/blockly/wiki/Lists#getting-a-sublist"; // untranslated Blockly.Msg.LISTS_GET_SUBLIST_START_FIRST = "få underlista från första"; Blockly.Msg.LISTS_GET_SUBLIST_START_FROM_END = "få underlista från # från slutet"; Blockly.Msg.LISTS_GET_SUBLIST_START_FROM_START = "få underlista från #"; Blockly.Msg.LISTS_GET_SUBLIST_TAIL = ""; // untranslated Blockly.Msg.LISTS_GET_SUBLIST_TOOLTIP = "Skapar en kopia av den specificerade delen av en lista."; Blockly.Msg.LISTS_INDEX_OF_FIRST = "hitta första förekomsten av objektet"; Blockly.Msg.LISTS_INDEX_OF_HELPURL = "https://github.com/google/blockly/wiki/Lists#getting-items-from-a-list"; // untranslated Blockly.Msg.LISTS_INDEX_OF_LAST = "hitta sista förekomsten av objektet"; Blockly.Msg.LISTS_INDEX_OF_TOOLTIP = "Ger tillbaka den första/sista förekomsten av objektet i listan. 
Ger tillbaka 0 om texten inte hittas."; Blockly.Msg.LISTS_INLIST = "i listan"; Blockly.Msg.LISTS_IS_EMPTY_HELPURL = "https://github.com/google/blockly/wiki/Lists#is-empty"; // untranslated Blockly.Msg.LISTS_IS_EMPTY_TITLE = "%1 är tom"; Blockly.Msg.LISTS_LENGTH_HELPURL = "https://github.com/google/blockly/wiki/Lists#length-of"; // untranslated Blockly.Msg.LISTS_LENGTH_TITLE = "längden på %1"; Blockly.Msg.LISTS_LENGTH_TOOLTIP = "Returnerar längden på en lista."; Blockly.Msg.LISTS_REPEAT_HELPURL = "https://github.com/google/blockly/wiki/Lists#create-list-with"; // untranslated Blockly.Msg.LISTS_REPEAT_TITLE = "skapa lista med föremålet %1 upprepat %2 gånger"; Blockly.Msg.LISTS_REPEAT_TOOLTIP = "Skapar en lista som innehåller ett valt värde upprepat ett bestämt antalet gånger."; Blockly.Msg.LISTS_SET_INDEX_HELPURL = "https://github.com/google/blockly/wiki/Lists#in-list--set"; // untranslated Blockly.Msg.LISTS_SET_INDEX_INPUT_TO = "som"; Blockly.Msg.LISTS_SET_INDEX_INSERT = "Sätt in vid"; Blockly.Msg.LISTS_SET_INDEX_SET = "ange"; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_FIRST = "sätter in objektet i början av en lista."; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_FROM_END = "sätter in objektet vid en specificerad position i en lista. #1 är det sista objektet."; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_FROM_START = "Sätter in objektet vid en specificerad position i en lista. #1 är det första objektet."; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_LAST = "Lägg till objektet i slutet av en lista."; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_RANDOM = "sätter in objektet på en slumpad position i en lista."; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_FIRST = "Anger det första objektet i en lista."; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_FROM_END = "Sätter in objektet vid en specificerad position i en lista. #1 är det sista objektet."; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_FROM_START = "Sätter in objektet vid en specificerad position i en lista. 
#1 är det första objektet."; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_LAST = "Anger det sista elementet i en lista."; Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_RANDOM = "Sätter in ett slumpat objekt i en lista."; Blockly.Msg.LISTS_SPLIT_HELPURL = "https://github.com/google/blockly/wiki/Lists#splitting-strings-and-joining-lists"; Blockly.Msg.LISTS_SPLIT_LIST_FROM_TEXT = "skapa lista från text"; Blockly.Msg.LISTS_SPLIT_TEXT_FROM_LIST = "skapa text från lista"; Blockly.Msg.LISTS_SPLIT_TOOLTIP_JOIN = "Sammanfoga en textlista till en text, som separeras av en avgränsare."; Blockly.Msg.LISTS_SPLIT_TOOLTIP_SPLIT = "Dela upp text till en textlista och bryt vid varje avgränsare."; Blockly.Msg.LISTS_SPLIT_WITH_DELIMITER = "med avgränsare"; Blockly.Msg.LISTS_TOOLTIP = "Returnerar sant om listan är tom."; Blockly.Msg.LOGIC_BOOLEAN_FALSE = "falskt"; Blockly.Msg.LOGIC_BOOLEAN_HELPURL = "https://github.com/google/blockly/wiki/Logic#values"; // untranslated Blockly.Msg.LOGIC_BOOLEAN_TOOLTIP = "Returnerar antingen sant eller falskt."; Blockly.Msg.LOGIC_BOOLEAN_TRUE = "sant"; Blockly.Msg.LOGIC_COMPARE_HELPURL = "https://sv.wikipedia.org/wiki/Olikhet"; Blockly.Msg.LOGIC_COMPARE_TOOLTIP_EQ = "Ger tillbaka sant om båda värdena är lika med varandra."; Blockly.Msg.LOGIC_COMPARE_TOOLTIP_GT = "Ger tillbaka sant om det första värdet är större än det andra."; Blockly.Msg.LOGIC_COMPARE_TOOLTIP_GTE = "Ger tillbaka sant om det första värdet är större än eller lika med det andra."; Blockly.Msg.LOGIC_COMPARE_TOOLTIP_LT = "Ger tillbaka sant om det första värdet är mindre än det andra."; Blockly.Msg.LOGIC_COMPARE_TOOLTIP_LTE = "Ger tillbaka sant om det första värdet är mindre än eller lika med det andra."; Blockly.Msg.LOGIC_COMPARE_TOOLTIP_NEQ = "Ger tillbaka sant om båda värdena inte är lika med varandra."; Blockly.Msg.LOGIC_NEGATE_HELPURL = "https://github.com/google/blockly/wiki/Logic#not"; // untranslated Blockly.Msg.LOGIC_NEGATE_TITLE = "inte %1"; Blockly.Msg.LOGIC_NEGATE_TOOLTIP = "Ger tillbaka 
sant om inmatningen är falsk. Ger tillbaka falskt och inmatningen är sann."; Blockly.Msg.LOGIC_NULL = "null"; Blockly.Msg.LOGIC_NULL_HELPURL = "https://sv.wikipedia.org/wiki/Null"; Blockly.Msg.LOGIC_NULL_TOOLTIP = "Returnerar null."; Blockly.Msg.LOGIC_OPERATION_AND = "och"; Blockly.Msg.LOGIC_OPERATION_HELPURL = "https://github.com/google/blockly/wiki/Logic#logical-operations"; // untranslated Blockly.Msg.LOGIC_OPERATION_OR = "eller"; Blockly.Msg.LOGIC_OPERATION_TOOLTIP_AND = "Ger tillbaka sant om båda värdena är sanna."; Blockly.Msg.LOGIC_OPERATION_TOOLTIP_OR = "Ger tillbaka sant om minst ett av värdena är sant."; Blockly.Msg.LOGIC_TERNARY_CONDITION = "test"; Blockly.Msg.LOGIC_TERNARY_HELPURL = "https://en.wikipedia.org/wiki/%3F:"; Blockly.Msg.LOGIC_TERNARY_IF_FALSE = "om falskt"; Blockly.Msg.LOGIC_TERNARY_IF_TRUE = "om sant"; Blockly.Msg.LOGIC_TERNARY_TOOLTIP = "Kontrollera villkoret i \"test\". Om villkoret är sant, ge tillbaka \"om sant\"-värdet; annars ge tillbaka \"om falskt\"-värdet."; Blockly.Msg.MATH_ADDITION_SYMBOL = "+"; Blockly.Msg.MATH_ARITHMETIC_HELPURL = "https://sv.wikipedia.org/wiki/Aritmetik"; Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_ADD = "Returnerar summan av de två talen."; Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_DIVIDE = "Returnerar kvoten av de två talen."; Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_MINUS = "Returnerar differensen mellan de två talen."; Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_MULTIPLY = "Returnerar produkten av de två talen."; Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_POWER = "Ger tillbaka det första talet upphöjt till det andra talet."; Blockly.Msg.MATH_CHANGE_HELPURL = "https://en.wikipedia.org/wiki/Programming_idiom#Incrementing_a_counter"; Blockly.Msg.MATH_CHANGE_INPUT_BY = "med"; Blockly.Msg.MATH_CHANGE_TITLE_CHANGE = "ändra"; Blockly.Msg.MATH_CHANGE_TOOLTIP = "Lägg till ett tal till variabeln '%1'."; Blockly.Msg.MATH_CONSTANT_HELPURL = "https://sv.wikipedia.org/wiki/Matematisk_konstant"; Blockly.Msg.MATH_CONSTANT_TOOLTIP = "Returnerar en av de 
vanliga konstanterna: π (3.141…), e (2.718…), φ (1.618…), sqrt(2) (1.414…), sqrt(½) (0.707…) eller ∞ (oändligt)."; Blockly.Msg.MATH_CONSTRAIN_HELPURL = "https://en.wikipedia.org/wiki/Clamping_%28graphics%29"; Blockly.Msg.MATH_CONSTRAIN_TITLE = "begränsa %1 till mellan %2 och %3"; Blockly.Msg.MATH_CONSTRAIN_TOOLTIP = "Begränsa ett tal till att mellan de angivna gränsvärden (inklusive)."; Blockly.Msg.MATH_DIVISION_SYMBOL = "÷"; Blockly.Msg.MATH_IS_DIVISIBLE_BY = "är delbart med"; Blockly.Msg.MATH_IS_EVEN = "är jämnt"; Blockly.Msg.MATH_IS_NEGATIVE = "är negativt"; Blockly.Msg.MATH_IS_ODD = "är ojämnt"; Blockly.Msg.MATH_IS_POSITIVE = "är positivt"; Blockly.Msg.MATH_IS_PRIME = "är ett primtal"; Blockly.Msg.MATH_IS_TOOLTIP = "Kontrollera om ett tal är jämnt, ojämnt, helt, positivt, negativt eller det är delbart med ett bestämt tal. Returnerar med sant eller falskt."; Blockly.Msg.MATH_IS_WHOLE = "är helt"; Blockly.Msg.MATH_MODULO_HELPURL = "https://en.wikipedia.org/wiki/Modulo_operation"; Blockly.Msg.MATH_MODULO_TITLE = "resten av %1 ÷ %2"; Blockly.Msg.MATH_MODULO_TOOLTIP = "Returnerar kvoten från divisionen av de två talen."; Blockly.Msg.MATH_MULTIPLICATION_SYMBOL = "×"; Blockly.Msg.MATH_NUMBER_HELPURL = "https://sv.wikipedia.org/wiki/Tal"; Blockly.Msg.MATH_NUMBER_TOOLTIP = "Ett tal."; Blockly.Msg.MATH_ONLIST_HELPURL = ""; // untranslated Blockly.Msg.MATH_ONLIST_OPERATOR_AVERAGE = "medelvärdet av listan"; Blockly.Msg.MATH_ONLIST_OPERATOR_MAX = "högsta talet i listan"; Blockly.Msg.MATH_ONLIST_OPERATOR_MEDIAN = "medianen av listan"; Blockly.Msg.MATH_ONLIST_OPERATOR_MIN = "minsta talet i listan"; Blockly.Msg.MATH_ONLIST_OPERATOR_MODE = "typvärdet i listan"; Blockly.Msg.MATH_ONLIST_OPERATOR_RANDOM = "slumpmässigt objekt i listan"; Blockly.Msg.MATH_ONLIST_OPERATOR_STD_DEV = "standardavvikelsen i listan"; Blockly.Msg.MATH_ONLIST_OPERATOR_SUM = "summan av listan"; Blockly.Msg.MATH_ONLIST_TOOLTIP_AVERAGE = "Ger tillbaka medelvärdet (aritmetiskt) av de numeriska värdena i 
listan."; Blockly.Msg.MATH_ONLIST_TOOLTIP_MAX = "Ger tillbaka det största talet i listan."; Blockly.Msg.MATH_ONLIST_TOOLTIP_MEDIAN = "Returnerar medianen av talen i listan."; Blockly.Msg.MATH_ONLIST_TOOLTIP_MIN = "Ger tillbaka det minsta talet i listan."; Blockly.Msg.MATH_ONLIST_TOOLTIP_MODE = "Ger tillbaka en lista med de(t) vanligaste objekte(t/n) i listan."; Blockly.Msg.MATH_ONLIST_TOOLTIP_RANDOM = "Returnerar ett slumpmässigt element från listan."; Blockly.Msg.MATH_ONLIST_TOOLTIP_STD_DEV = "Ger tillbaka standardavvikelsen i listan."; Blockly.Msg.MATH_ONLIST_TOOLTIP_SUM = "Ger tillbaka summan av alla talen i listan."; Blockly.Msg.MATH_POWER_SYMBOL = "^"; Blockly.Msg.MATH_RANDOM_FLOAT_HELPURL = "https://sv.wikipedia.org/wiki/Slumptalsgenerator"; Blockly.Msg.MATH_RANDOM_FLOAT_TITLE_RANDOM = "slumpat decimaltal"; Blockly.Msg.MATH_RANDOM_FLOAT_TOOLTIP = "Ger tillbaka ett slumpat decimaltal mellan 0.0 (inkluderat) och 1.0 (exkluderat)."; Blockly.Msg.MATH_RANDOM_INT_HELPURL = "https://sv.wikipedia.org/wiki/Slumptalsgenerator"; Blockly.Msg.MATH_RANDOM_INT_TITLE = "slumpartat heltal från %1 till %2"; Blockly.Msg.MATH_RANDOM_INT_TOOLTIP = "Ger tillbaka ett slumpat heltal mellan två värden (inklusive)."; Blockly.Msg.MATH_ROUND_HELPURL = "https://sv.wikipedia.org/wiki/Avrundning"; Blockly.Msg.MATH_ROUND_OPERATOR_ROUND = "avrunda"; Blockly.Msg.MATH_ROUND_OPERATOR_ROUNDDOWN = "avrunda nedåt"; Blockly.Msg.MATH_ROUND_OPERATOR_ROUNDUP = "avrunda uppåt"; Blockly.Msg.MATH_ROUND_TOOLTIP = "Avrunda ett tal uppåt eller nedåt."; Blockly.Msg.MATH_SINGLE_HELPURL = "https://sv.wikipedia.org/wiki/Kvadratrot"; Blockly.Msg.MATH_SINGLE_OP_ABSOLUTE = "absolut"; Blockly.Msg.MATH_SINGLE_OP_ROOT = "kvadratrot"; Blockly.Msg.MATH_SINGLE_TOOLTIP_ABS = "Returnerar absolutvärdet av ett tal."; Blockly.Msg.MATH_SINGLE_TOOLTIP_EXP = "Ger tillbaka e upphöjt i ett tal."; Blockly.Msg.MATH_SINGLE_TOOLTIP_LN = "Returnera den naturliga logaritmen av ett tal."; Blockly.Msg.MATH_SINGLE_TOOLTIP_LOG10 = 
"Returnerar logaritmen för bas 10 av ett tal."; Blockly.Msg.MATH_SINGLE_TOOLTIP_NEG = "Returnerar negationen av ett tal."; Blockly.Msg.MATH_SINGLE_TOOLTIP_POW10 = "Ger tillbaka 10 upphöjt i ett tal."; Blockly.Msg.MATH_SINGLE_TOOLTIP_ROOT = "Returnerar kvadratroten av ett tal."; Blockly.Msg.MATH_SUBTRACTION_SYMBOL = "-"; Blockly.Msg.MATH_TRIG_ACOS = "arccos"; Blockly.Msg.MATH_TRIG_ASIN = "arcsin"; Blockly.Msg.MATH_TRIG_ATAN = "arctan"; Blockly.Msg.MATH_TRIG_COS = "cos"; Blockly.Msg.MATH_TRIG_HELPURL = "https://sv.wikipedia.org/wiki/Trigonometrisk_funktion"; Blockly.Msg.MATH_TRIG_SIN = "sin"; Blockly.Msg.MATH_TRIG_TAN = "tan"; Blockly.Msg.MATH_TRIG_TOOLTIP_ACOS = "Ger tillbaka arcus cosinus (arccos) för ett tal."; Blockly.Msg.MATH_TRIG_TOOLTIP_ASIN = "Ger tillbaka arcus sinus (arcsin) för ett tal."; Blockly.Msg.MATH_TRIG_TOOLTIP_ATAN = "Ger tillbaka arcus tangens (arctan) av ett tal."; Blockly.Msg.MATH_TRIG_TOOLTIP_COS = "Ger tillbaka cosinus för en grad (inte radian)."; Blockly.Msg.MATH_TRIG_TOOLTIP_SIN = "Ger tillbaka sinus för en grad (inte radian)."; Blockly.Msg.MATH_TRIG_TOOLTIP_TAN = "Ger tillbaka tangens för en grad (inte radian)."; Blockly.Msg.ME = "Jag"; Blockly.Msg.NEW_VARIABLE = "Ny variabel..."; Blockly.Msg.NEW_VARIABLE_TITLE = "Nytt variabelnamn:"; Blockly.Msg.ORDINAL_NUMBER_SUFFIX = ""; // untranslated Blockly.Msg.PROCEDURES_ALLOW_STATEMENTS = "tillåta uttalanden"; Blockly.Msg.PROCEDURES_BEFORE_PARAMS = "med:"; Blockly.Msg.PROCEDURES_CALLNORETURN_CALL = ""; // untranslated Blockly.Msg.PROCEDURES_CALLNORETURN_HELPURL = "https://sv.wikipedia.org/wiki/Funktion_%28programmering%29"; Blockly.Msg.PROCEDURES_CALLNORETURN_TOOLTIP = "Kör den användardefinierade funktionen \"%1\"."; Blockly.Msg.PROCEDURES_CALLRETURN_HELPURL = "https://sv.wikipedia.org/wiki/Funktion_%28programmering%29"; Blockly.Msg.PROCEDURES_CALLRETURN_TOOLTIP = "Kör den användardefinierade funktionen \"%1\" och använd resultatet av den."; Blockly.Msg.PROCEDURES_CALL_BEFORE_PARAMS = "med:"; 
Blockly.Msg.PROCEDURES_CREATE_DO = "Skapa '%1'"; Blockly.Msg.PROCEDURES_DEFNORETURN_DO = ""; // untranslated Blockly.Msg.PROCEDURES_DEFNORETURN_HELPURL = "https://sv.wikipedia.org/wiki/Funktion_%28programmering%29"; Blockly.Msg.PROCEDURES_DEFNORETURN_PROCEDURE = "göra något"; Blockly.Msg.PROCEDURES_DEFNORETURN_TITLE = "för att"; Blockly.Msg.PROCEDURES_DEFNORETURN_TOOLTIP = "Skapar en funktion utan output."; Blockly.Msg.PROCEDURES_DEFRETURN_HELPURL = "https://sv.wikipedia.org/wiki/Funktion_%28programmering%29"; Blockly.Msg.PROCEDURES_DEFRETURN_RETURN = "returnera"; Blockly.Msg.PROCEDURES_DEFRETURN_TOOLTIP = "Skapar en funktion med output."; Blockly.Msg.PROCEDURES_DEF_DUPLICATE_WARNING = "Varning: Denna funktion har dubbla parametrar."; Blockly.Msg.PROCEDURES_HIGHLIGHT_DEF = "Markera funktionsdefinition"; Blockly.Msg.PROCEDURES_IFRETURN_TOOLTIP = "Om ett värde är sant returneras ett andra värde."; Blockly.Msg.PROCEDURES_IFRETURN_WARNING = "Varning: Detta block får användas endast i en funktionsdefinition."; Blockly.Msg.PROCEDURES_MUTATORARG_TITLE = "inmatningsnamn:"; Blockly.Msg.PROCEDURES_MUTATORARG_TOOLTIP = "Lägg till en inmatning till funktionen."; Blockly.Msg.PROCEDURES_MUTATORCONTAINER_TITLE = "inmatningar"; Blockly.Msg.PROCEDURES_MUTATORCONTAINER_TOOLTIP = "Lägg till, ta bort och ändra ordningen för inmatningar till denna funktion."; Blockly.Msg.REMOVE_COMMENT = "Radera kommentar"; Blockly.Msg.RENAME_VARIABLE = "Byt namn på variabel..."; Blockly.Msg.RENAME_VARIABLE_TITLE = "Byt namn på alla'%1'-variabler till:"; Blockly.Msg.TEXT_APPEND_APPENDTEXT = "lägg till text"; Blockly.Msg.TEXT_APPEND_HELPURL = "https://github.com/google/blockly/wiki/Text#text-modification"; // untranslated Blockly.Msg.TEXT_APPEND_TO = "till"; Blockly.Msg.TEXT_APPEND_TOOLTIP = "Lägg till lite text till variabeln '%1'."; Blockly.Msg.TEXT_CHANGECASE_HELPURL = "https://github.com/google/blockly/wiki/Text#adjusting-text-case"; // untranslated Blockly.Msg.TEXT_CHANGECASE_OPERATOR_LOWERCASE = 
"till gemener"; Blockly.Msg.TEXT_CHANGECASE_OPERATOR_TITLECASE = "till Versala Initialer"; Blockly.Msg.TEXT_CHANGECASE_OPERATOR_UPPERCASE = "till VERSALER"; Blockly.Msg.TEXT_CHANGECASE_TOOLTIP = "Returnerar en kopia av texten i ett annat skiftläge."; Blockly.Msg.TEXT_CHARAT_FIRST = "hämta första bokstaven"; Blockly.Msg.TEXT_CHARAT_FROM_END = "hämta bokstaven # från slutet"; Blockly.Msg.TEXT_CHARAT_FROM_START = "hämta bokstaven #"; Blockly.Msg.TEXT_CHARAT_HELPURL = "https://github.com/google/blockly/wiki/Text#extracting-text"; // untranslated Blockly.Msg.TEXT_CHARAT_INPUT_INTEXT = "i texten"; Blockly.Msg.TEXT_CHARAT_LAST = "hämta sista bokstaven"; Blockly.Msg.TEXT_CHARAT_RANDOM = "hämta slumpad bokstav"; Blockly.Msg.TEXT_CHARAT_TAIL = ""; // untranslated Blockly.Msg.TEXT_CHARAT_TOOLTIP = "Ger tillbaka bokstaven på den specificerade positionen."; Blockly.Msg.TEXT_CREATE_JOIN_ITEM_TOOLTIP = "Lägg till ett föremål till texten."; Blockly.Msg.TEXT_CREATE_JOIN_TITLE_JOIN = "sammanfoga"; Blockly.Msg.TEXT_CREATE_JOIN_TOOLTIP = "Lägg till, ta bort eller ändra ordningen för sektioner för att omkonfigurera detta textblock."; Blockly.Msg.TEXT_GET_SUBSTRING_END_FROM_END = "till bokstav # från slutet"; Blockly.Msg.TEXT_GET_SUBSTRING_END_FROM_START = "till bokstav #"; Blockly.Msg.TEXT_GET_SUBSTRING_END_LAST = "till sista bokstaven"; Blockly.Msg.TEXT_GET_SUBSTRING_HELPURL = "https://github.com/google/blockly/wiki/Text#extracting-a-region-of-text"; // untranslated Blockly.Msg.TEXT_GET_SUBSTRING_INPUT_IN_TEXT = "i texten"; Blockly.Msg.TEXT_GET_SUBSTRING_START_FIRST = "få textdel från första bokstaven"; Blockly.Msg.TEXT_GET_SUBSTRING_START_FROM_END = "få textdel från bokstav # från slutet"; Blockly.Msg.TEXT_GET_SUBSTRING_START_FROM_START = "få textdel från bokstav #"; Blockly.Msg.TEXT_GET_SUBSTRING_TAIL = ""; // untranslated Blockly.Msg.TEXT_GET_SUBSTRING_TOOLTIP = "Ger tillbaka en viss del av texten."; Blockly.Msg.TEXT_INDEXOF_HELPURL = 
"https://github.com/google/blockly/wiki/Text#finding-text"; // untranslated Blockly.Msg.TEXT_INDEXOF_INPUT_INTEXT = "i texten"; Blockly.Msg.TEXT_INDEXOF_OPERATOR_FIRST = "hitta första förekomsten av texten"; Blockly.Msg.TEXT_INDEXOF_OPERATOR_LAST = "hitta sista förekomsten av texten"; Blockly.Msg.TEXT_INDEXOF_TAIL = ""; // untranslated Blockly.Msg.TEXT_INDEXOF_TOOLTIP = "Ger tillbaka indexet för den första/sista förekomsten av första texten i den andra texten. Ger tillbaka 0 om texten inte hittas."; Blockly.Msg.TEXT_ISEMPTY_HELPURL = "https://github.com/google/blockly/wiki/Text#checking-for-empty-text"; // untranslated Blockly.Msg.TEXT_ISEMPTY_TITLE = "%1 är tom"; Blockly.Msg.TEXT_ISEMPTY_TOOLTIP = "Returnerar sant om den angivna texten är tom."; Blockly.Msg.TEXT_JOIN_HELPURL = "https://github.com/google/blockly/wiki/Text#text-creation"; // untranslated Blockly.Msg.TEXT_JOIN_TITLE_CREATEWITH = "skapa text med"; Blockly.Msg.TEXT_JOIN_TOOLTIP = "Skapa en textbit genom att sammanfoga ett valfritt antal föremål."; Blockly.Msg.TEXT_LENGTH_HELPURL = "https://github.com/google/blockly/wiki/Text#text-modification"; // untranslated Blockly.Msg.TEXT_LENGTH_TITLE = "längden på %1"; Blockly.Msg.TEXT_LENGTH_TOOLTIP = "Ger tillbaka antalet bokstäver (inklusive mellanslag) i den angivna texten."; Blockly.Msg.TEXT_PRINT_HELPURL = "https://github.com/google/blockly/wiki/Text#printing-text"; // untranslated Blockly.Msg.TEXT_PRINT_TITLE = "skriv %1"; Blockly.Msg.TEXT_PRINT_TOOLTIP = "Skriv den angivna texten, talet eller annat värde."; Blockly.Msg.TEXT_PROMPT_HELPURL = "https://github.com/google/blockly/wiki/Text#getting-input-from-the-user"; // untranslated Blockly.Msg.TEXT_PROMPT_TOOLTIP_NUMBER = "Fråga användaren efter ett tal."; Blockly.Msg.TEXT_PROMPT_TOOLTIP_TEXT = "Fråga användaren efter lite text."; Blockly.Msg.TEXT_PROMPT_TYPE_NUMBER = "fråga efter ett tal med meddelande"; Blockly.Msg.TEXT_PROMPT_TYPE_TEXT = "fråga efter text med meddelande"; Blockly.Msg.TEXT_TEXT_HELPURL = 
"https://sv.wikipedia.org/wiki/Str%C3%A4ng_%28data%29"; Blockly.Msg.TEXT_TEXT_TOOLTIP = "En bokstav, ord eller textrad."; Blockly.Msg.TEXT_TRIM_HELPURL = "https://github.com/google/blockly/wiki/Text#trimming-removing-spaces"; // untranslated Blockly.Msg.TEXT_TRIM_OPERATOR_BOTH = "ta bort mellanrum från båda sidorna av"; Blockly.Msg.TEXT_TRIM_OPERATOR_LEFT = "ta bort mellanrum från vänstra sidan av"; Blockly.Msg.TEXT_TRIM_OPERATOR_RIGHT = "ta bort mellanrum från högra sidan av"; Blockly.Msg.TEXT_TRIM_TOOLTIP = "Returnerar en kopia av texten med borttagna mellanrum från en eller båda ändar."; Blockly.Msg.TODAY = "Today"; // untranslated Blockly.Msg.VARIABLES_DEFAULT_NAME = "föremål"; Blockly.Msg.VARIABLES_GET_CREATE_SET = "Skapa \"välj %1\""; Blockly.Msg.VARIABLES_GET_HELPURL = "https://github.com/google/blockly/wiki/Variables#get"; // untranslated Blockly.Msg.VARIABLES_GET_TAIL = ""; // untranslated Blockly.Msg.VARIABLES_GET_TITLE = ""; // untranslated Blockly.Msg.VARIABLES_GET_TOOLTIP = "Returnerar värdet av denna variabel."; Blockly.Msg.VARIABLES_SET_CREATE_GET = "Skapa 'hämta %1'"; Blockly.Msg.VARIABLES_SET_HELPURL = "https://github.com/google/blockly/wiki/Variables#set"; // untranslated Blockly.Msg.VARIABLES_SET_TAIL = "till"; Blockly.Msg.VARIABLES_SET_TITLE = "välj"; Blockly.Msg.VARIABLES_SET_TOOLTIP = "Gör så att den här variabeln blir lika med inputen."; Blockly.Msg.PROCEDURES_DEFRETURN_TITLE = Blockly.Msg.PROCEDURES_DEFNORETURN_TITLE; Blockly.Msg.LISTS_GET_SUBLIST_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST; Blockly.Msg.LISTS_SET_INDEX_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST; Blockly.Msg.PROCEDURES_DEFRETURN_PROCEDURE = Blockly.Msg.PROCEDURES_DEFNORETURN_PROCEDURE; Blockly.Msg.VARIABLES_SET_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME; Blockly.Msg.LISTS_CREATE_WITH_ITEM_TITLE = Blockly.Msg.VARIABLES_DEFAULT_NAME; Blockly.Msg.MATH_CHANGE_TITLE_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME; Blockly.Msg.VARIABLES_GET_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME; 
Blockly.Msg.PROCEDURES_DEFRETURN_DO = Blockly.Msg.PROCEDURES_DEFNORETURN_DO; Blockly.Msg.LISTS_GET_INDEX_HELPURL = Blockly.Msg.LISTS_INDEX_OF_HELPURL; Blockly.Msg.TEXT_CREATE_JOIN_ITEM_TITLE_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME; Blockly.Msg.CONTROLS_IF_MSG_THEN = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO; Blockly.Msg.LISTS_INDEX_OF_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST; Blockly.Msg.PROCEDURES_CALLRETURN_CALL = Blockly.Msg.PROCEDURES_CALLNORETURN_CALL; Blockly.Msg.LISTS_GET_INDEX_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST; Blockly.Msg.CONTROLS_FOR_INPUT_DO = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO; Blockly.Msg.CONTROLS_FOREACH_INPUT_DO = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO; Blockly.Msg.CONTROLS_IF_IF_TITLE_IF = Blockly.Msg.CONTROLS_IF_MSG_IF; Blockly.Msg.CONTROLS_WHILEUNTIL_INPUT_DO = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO; Blockly.Msg.CONTROLS_IF_ELSEIF_TITLE_ELSEIF = Blockly.Msg.CONTROLS_IF_MSG_ELSEIF; Blockly.Msg.TEXT_APPEND_VARIABLE = Blockly.Msg.VARIABLES_DEFAULT_NAME; Blockly.Msg.CONTROLS_IF_ELSE_TITLE_ELSE = Blockly.Msg.CONTROLS_IF_MSG_ELSE;
apache-2.0
smgoller/geode
geode-core/src/main/java/org/apache/geode/internal/cache/control/ResourceMonitor.java
1735
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.control;

import java.util.Set;

import org.apache.geode.internal.cache.control.ResourceAdvisor.ResourceManagerProfile;

/**
 * Implemented by classes that the ResourceManager creates in order to monitor a specific type of
 * resource (heap memory, off-heap memory, disk, etc.).
 *
 * @since Geode 1.0
 */
interface ResourceMonitor {

  /**
   * Ask the monitor to notify the given listeners of the given event.
   *
   * @param listeners Set of listeners to notify.
   * @param event Event to send to the listeners.
   */
  void notifyListeners(final Set<ResourceListener<?>> listeners, final ResourceEvent event);

  /**
   * Ask the monitor to stop monitoring. No further events are expected to be delivered once this
   * returns.
   */
  void stopMonitoring();

  /**
   * Populate the fields in the profile that are appropriate for this monitor.
   *
   * @param profile The profile to populate.
   */
  void fillInProfile(final ResourceManagerProfile profile);
}
apache-2.0
bulldog2011/nano-rest
sample/HelloEBayTrading/src/ebay/apis/eblbasecomponents/VerifyRelistItemResponseType.java
4812
// Generated by xsd compiler for android/java
// DO NOT CHANGE!
package ebay.apis.eblbasecomponents;

import java.io.Serializable;
import com.leansoft.nano.annotation.*;
import java.util.List;
import java.util.Date;

/**
 * Returns the estimated fees for the listing that is being verified for a re-list.
 *
 * NOTE(review): generated data-binding class (see header); only documentation has been touched.
 */
@RootElement(name = "VerifyRelistItemResponse", namespace = "urn:ebay:apis:eBLBaseComponents")
public class VerifyRelistItemResponseType extends AbstractResponseType implements Serializable {

    private static final long serialVersionUID = -1L;

    // Item ID of the would-be new listing; 0 for a verify-only call (see getItemID).
    @Element(name = "ItemID")
    private String itemID;

    // Estimated listing fees, excluding the Final Value Fee (see getFees).
    @Element(name = "Fees")
    private FeesType fees;

    // When the new listing would become active.
    @Element(name = "StartTime")
    private Date startTime;

    // When the new listing would end (start time plus listing duration).
    @Element(name = "EndTime")
    private Date endTime;

    // Reasons for any discount that would have applied to the listing.
    @Element(name = "DiscountReason")
    private List<DiscountReasonCodeType> discountReason;

    // eBay catalog products matching the item details supplied by the seller.
    @Element(name = "ProductSuggestions")
    private ProductSuggestionsType productSuggestions;

    /**
     * Unique item ID for the new listing. As VerifyRelistItem does not
     * actually re-list an item, returns 0 instead of a normal item ID.
     *
     * @return the item ID ("0" for a verify-only call)
     */
    public String getItemID() {
        return this.itemID;
    }

    /**
     * Unique item ID for the new listing. As VerifyRelistItem does not
     * actually re-list an item, returns 0 instead of a normal item ID.
     *
     * @param itemID the item ID to set
     */
    public void setItemID(String itemID) {
        this.itemID = itemID;
    }

    /**
     * Child elements contain the estimated listing fees for the new item
     * listing. The fees do not include the Final Value Fee (FVF), which cannot
     * be determined until an item is sold.
     *
     * @return the estimated fees
     */
    public FeesType getFees() {
        return this.fees;
    }

    /**
     * Child elements contain the estimated listing fees for the new item
     * listing. The fees do not include the Final Value Fee (FVF), which cannot
     * be determined until an item is sold.
     *
     * @param fees the estimated fees to set
     */
    public void setFees(FeesType fees) {
        this.fees = fees;
    }

    /**
     * Date and time the new listing became active on the eBay site.
     *
     * @return the listing start time
     */
    public Date getStartTime() {
        return this.startTime;
    }

    /**
     * Date and time the new listing became active on the eBay site.
     *
     * @param startTime the listing start time to set
     */
    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    /**
     * Date and time when the new listing ends. This is the starting time plus
     * the listing duration.
     *
     * @return the listing end time
     */
    public Date getEndTime() {
        return this.endTime;
    }

    /**
     * Date and time when the new listing ends. This is the starting time plus
     * the listing duration.
     *
     * @param endTime the listing end time to set
     */
    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    /**
     * The nature of the discount, if a discount would have applied
     * had this actually been listed at this time.
     *
     * @return the discount reasons
     */
    public List<DiscountReasonCodeType> getDiscountReason() {
        return this.discountReason;
    }

    /**
     * The nature of the discount, if a discount would have applied
     * had this actually been listed at this time.
     *
     * @param discountReason the discount reasons to set
     */
    public void setDiscountReason(List<DiscountReasonCodeType> discountReason) {
        this.discountReason = discountReason;
    }

    /**
     * Provides a list of products recommended by eBay which match the item information
     * provided by the seller.
     * Not applicable to Half.com.
     *
     * @return the suggested products
     */
    public ProductSuggestionsType getProductSuggestions() {
        return this.productSuggestions;
    }

    /**
     * Provides a list of products recommended by eBay which match the item information
     * provided by the seller.
     * Not applicable to Half.com.
     *
     * @param productSuggestions the suggested products to set
     */
    public void setProductSuggestions(ProductSuggestionsType productSuggestions) {
        this.productSuggestions = productSuggestions;
    }
}
apache-2.0
ern/elasticsearch
x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java
7809
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.ssl;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.ssl.SslConfiguration;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.xpack.core.ssl.SSLService;

import javax.net.ssl.SSLException;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
import java.io.IOException;
import java.net.SocketException;
import java.nio.file.AtomicMoveNotSupportedException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;

/**
 * Integration tests for SSL reloading support.
 *
 * The cluster nodes are configured to trust a placeholder certificate file that is later
 * overwritten on disk; the test then verifies that a client presenting the new certificate,
 * rejected before the overwrite, is accepted afterwards (i.e. the SSL configuration reloaded).
 */
public class SSLReloadIntegTests extends SecurityIntegTestCase {

    // Paths are lazily initialized once (per JVM) in nodeSettings and shared by all nodes,
    // so every node trusts the same updateable.crt file that the test later replaces.
    private Path nodeKeyPath;
    private Path nodeCertPath;
    private Path clientCertPath;
    private Path updateableCertPath;

    @Override
    public Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
        // Nodes start trusting testnode.crt and testclient.crt
        Path origKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
        Path origCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
        Path origClientCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt");
        Path tempDir = createTempDir();
        try {
            if (nodeKeyPath == null) {
                nodeKeyPath = tempDir.resolve("testnode.pem");
                Files.copy(origKeyPath, nodeKeyPath);
            }
            if (nodeCertPath == null) {
                nodeCertPath = tempDir.resolve("testnode.crt");
                Files.copy(origCertPath, nodeCertPath);
            }
            if (clientCertPath == null) {
                clientCertPath = tempDir.resolve("testclient.crt");
                Files.copy(origClientCertPath, clientCertPath);
            }
            // Placeholder trusted certificate that will be updated later on
            if (updateableCertPath == null) {
                updateableCertPath = tempDir.resolve("updateable.crt");
                Files.copy(origCertPath, updateableCertPath);
            }
        } catch (IOException e) {
            throw new ElasticsearchException("failed to copy key or certificate", e);
        }

        Settings settings = super.nodeSettings(nodeOrdinal, otherSettings);
        // Drop any inherited transport-SSL settings; this test fully controls them below.
        Settings.Builder builder = Settings.builder()
            .put(settings.filter((s) -> s.startsWith("xpack.security.transport.ssl.") == false));

        builder.put("path.home", createTempDir())
            .put("xpack.security.transport.ssl.key", nodeKeyPath)
            .put("xpack.security.transport.ssl.key_passphrase", "testnode")
            .put("xpack.security.transport.ssl.certificate", nodeCertPath)
            .putList("xpack.security.transport.ssl.certificate_authorities",
                Arrays.asList(nodeCertPath.toString(), clientCertPath.toString(), updateableCertPath.toString()))
            // Short reload interval so the on-disk certificate swap is picked up quickly.
            .put("resource.reload.interval.high", "1s");

        builder.put("xpack.security.transport.ssl.enabled", true);
        return builder.build();
    }

    @Override
    protected boolean transportSSLEnabled() {
        return true;
    }

    public void testThatSSLConfigurationReloadsOnModification() throws Exception {
        assumeFalse("https://github.com/elastic/elasticsearch/issues/49094", inFipsJvm());
        Path keyPath = createTempDir().resolve("testnode_updated.pem");
        Path certPath = createTempDir().resolve("testnode_updated.crt");
        Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), keyPath);
        Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), certPath);
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode");
        // Client-side SSL configuration using the *updated* key/cert that the nodes do not trust yet.
        Settings settings = Settings.builder()
            .put("path.home", createTempDir())
            .put("xpack.security.transport.ssl.enabled", true)
            .put("xpack.security.transport.ssl.key", keyPath)
            .put("xpack.security.transport.ssl.certificate", certPath)
            .putList("xpack.security.transport.ssl.certificate_authorities",
                Arrays.asList(nodeCertPath.toString(), clientCertPath.toString(), updateableCertPath.toString()))
            .setSecureSettings(secureSettings)
            .build();
        String node = randomFrom(internalCluster().getNodeNames());
        SSLService sslService = new SSLService(TestEnvironment.newEnvironment(settings));
        SslConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
        SSLSocketFactory sslSocketFactory = sslService.sslSocketFactory(sslConfiguration);
        TransportAddress address = internalCluster()
            .getInstance(Transport.class, node).boundAddress().publishAddress();

        // Fails as our nodes do not trust testnode_updated.crt
        try (SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(address.getAddress(), address.getPort())) {
            assertThat(socket.isConnected(), is(true));
            socket.startHandshake();
            if (socket.getSession().getProtocol().equals("TLSv1.3")) {
                // blocking read for TLSv1.3 to see if the other side closed the connection
                socket.getInputStream().read();
            }
            fail("handshake should not have been successful!");
        } catch (SSLException | SocketException expected) {
            logger.trace("expected exception", expected);
        }
        // Copy testnode_updated.crt to the placeholder updateable.crt so that the nodes will start trusting it now
        try {
            Files.move(certPath, updateableCertPath, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
        } catch (AtomicMoveNotSupportedException e) {
            // Fall back to a non-atomic replace on filesystems without atomic-move support.
            Files.move(certPath, updateableCertPath, StandardCopyOption.REPLACE_EXISTING);
        }
        CountDownLatch latch = new CountDownLatch(1);
        // Retry until the nodes' file watcher has reloaded the trust material and the handshake succeeds.
        assertBusy(() -> {
            try (SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(address.getAddress(), address.getPort())) {
                logger.info("opened socket for reloading [{}]", socket);
                socket.addHandshakeCompletedListener(event -> {
                    try {
                        assertThat(event.getPeerPrincipal().getName(), containsString("Test Node"));
                        logger.info("ssl handshake completed on port [{}]", event.getSocket().getLocalPort());
                        latch.countDown();
                    } catch (Exception e) {
                        fail("caught exception in listener " + e.getMessage());
                    }
                });
                socket.startHandshake();
            } catch (Exception e) {
                fail("caught exception " + e.getMessage());
            }
        });
        latch.await();
    }
}
apache-2.0
bootstraponline-archive/gerrit-mirror
gerrit-server/src/main/java/com/google/gerrit/server/access/AccessCollection.java
1789
// Copyright (C) 2013 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.access; import com.google.gerrit.extensions.registration.DynamicMap; import com.google.gerrit.extensions.restapi.IdString; import com.google.gerrit.extensions.restapi.ResourceNotFoundException; import com.google.gerrit.extensions.restapi.RestCollection; import com.google.gerrit.extensions.restapi.RestView; import com.google.gerrit.extensions.restapi.TopLevelResource; import com.google.inject.Inject; import com.google.inject.Provider; public class AccessCollection implements RestCollection<TopLevelResource, AccessResource> { private final Provider<ListAccess> list; private final DynamicMap<RestView<AccessResource>> views; @Inject AccessCollection(Provider<ListAccess> list, DynamicMap<RestView<AccessResource>> views) { this.list = list; this.views = views; } @Override public RestView<TopLevelResource> list() { return list.get(); } @Override public AccessResource parse(TopLevelResource parent, IdString id) throws ResourceNotFoundException { throw new ResourceNotFoundException(id); } @Override public DynamicMap<RestView<AccessResource>> views() { return views; } }
apache-2.0
davidzchen/tensorflow
tensorflow/core/kernels/scan_ops.h
5228
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_KERNELS_SCAN_OPS_H_
#define TENSORFLOW_CORE_KERNELS_SCAN_OPS_H_

#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor"

#include "tensorflow/core/framework/tensor_types.h"

namespace tensorflow {
namespace functor {

typedef Eigen::Index Index;

// Functor that evaluates a scan (prefix reduction) with `reducer` along
// axis 1 of a rank-3 tensor, writing the result into `out`.
// `exclusive` selects an exclusive scan (element i excludes input i);
// `reverse` scans from the end of axis 1 instead of the beginning.
// TODO(b/154339590): Needs to be vectorized.
template <typename Device, typename Reducer, typename T>
struct Scan {
  void operator()(const Device& d, typename TTypes<T, 3>::ConstTensor in,
                  typename TTypes<T, 3>::Tensor out, const Reducer& reducer,
                  const bool reverse, const bool exclusive) {
    // Perform the reverse ops directly with Eigen, which avoids copying the
    // tensor twice compared to using individual ops.
    // Only axis 1 is (conditionally) reversed; axes 0 and 2 are left as-is.
    Eigen::array<bool, 3> dims;
    dims[0] = false;
    dims[1] = reverse;
    dims[2] = false;
    To32Bit(out).device(d) =
        To32Bit(in).reverse(dims).scan(1, reducer, exclusive).reverse(dims);
  }
};

// Numerically stable pairwise log-sum-exp:
//   logsumexp(a, b) = max(a, b) + log1p(exp(min(a, b) - max(a, b)))
// Subtracting the max before exponentiating avoids overflow; log1p keeps
// precision when exp(min - max) is tiny.
template <typename T>
struct LogSumExp {
  // Scalar form.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE T operator()(const T& a,
                                                     const T& b) const {
    auto mi = Eigen::internal::scalar_min_op<T>()(a, b);
    auto ma = Eigen::internal::scalar_max_op<T>()(a, b);

    auto sub = Eigen::internal::scalar_difference_op<T>();
    auto add = Eigen::internal::scalar_sum_op<T>();
    auto exp = Eigen::internal::scalar_exp_op<T>();
    auto log1p = Eigen::internal::scalar_log1p_op<T>();
    auto cmp_lt =
        Eigen::internal::scalar_cmp_op<T, T, Eigen::internal::cmp_LT>();

    auto logsumexp = add(log1p(exp(sub(mi, ma))), ma);
    // If the max is below NumTraits<T>::lowest() (i.e. -infinity), the
    // stabilized expression would be NaN; return the max (-inf) instead.
    return cmp_lt(ma, Eigen::NumTraits<T>::lowest()) ? ma : logsumexp;
  }

  // SIMD packet form; mirrors the scalar logic lane-by-lane via pselect.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE T packetOp(const T& a,
                                                   const T& b) const {
    auto mi = Eigen::internal::pmin(a, b);
    auto ma = Eigen::internal::pmax(a, b);

    using Eigen::internal::padd;
    using Eigen::internal::pcmp_lt;
    using Eigen::internal::pexp;
    using Eigen::internal::plog1p;
    using Eigen::internal::pset1;
    using Eigen::internal::psub;

    auto logsumexp = padd(plog1p(pexp(psub(mi, ma))), ma);
    return pselect(pcmp_lt(ma, pset1(Eigen::NumTraits<T>::lowest())), ma,
                   logsumexp);
  }
};

// Eigen reducer implementing the full reducer protocol (scalar + packet
// paths) for log-sum-exp, usable with Tensor::scan / Tensor::reduce.
template <typename T>
struct LogSumExpReducer {
  // Fold one scalar into the accumulator.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void reduce(const T t, T* accum) const {
    LogSumExp<T> logsumexp;
    *accum = logsumexp(*accum, t);
  }

  // Fold one packet into the packet accumulator, lane-wise.
  template <typename Packet>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void reducePacket(const Packet& p,
                                                          Packet* accum) const {
    LogSumExp<T> logsumexp;
    *accum = logsumexp.packetOp(*accum, p);
  }

  // Identity element: logsumexp(-inf, x) == x.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE T initialize() const {
    return -Eigen::NumTraits<T>::infinity();
  }

  template <typename Packet>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Packet initializePacket() const {
    return Eigen::internal::pset1(initialize());
  }

  // Scalar accumulator is already the final value.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE T finalize(const T accum) const {
    return accum;
  }

  template <typename Packet>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Packet
  finalizePacket(const Packet& vaccum) const {
    return vaccum;
  }

  // Combine the scalar tail accumulator with the packet accumulator into a
  // single scalar result.
  template <typename Packet>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE T
  finalizeBoth(const T saccum, const Packet& vaccum) const {
    auto max_reducer = Eigen::internal::MaxReducer<T>();
    auto sum_reducer = Eigen::internal::SumReducer<T>();
    auto exp = Eigen::internal::scalar_exp_op<T>();
    auto cmp_lt =
        Eigen::internal::scalar_cmp_op<T, T, Eigen::internal::cmp_LT>();
    auto log = Eigen::internal::scalar_log_op<T>();
    auto add = Eigen::internal::scalar_sum_op<T>();

    using Eigen::internal::pexp;
    using Eigen::internal::psub;

    // `ma = max(x1, ..., xn)`
    // If the max of all of the `xi` is `-infinity` then the result is
    // -infinity. If the max is larger than `-infinity` then it's safe to use
    // for normalization even if the other elements are `-infinity`.
    //
    // `logsumexp(x1, ..., xn) = ma + log (exp(x1 - ma) + ... + exp(xn - ma))`
    auto ma = max_reducer.finalizeBoth(saccum, vaccum);
    auto logsumexp = add(log(sum_reducer.finalizeBoth(
                             exp(saccum - ma), pexp(psub(vaccum, pset1(ma))))),
                         ma);
    return cmp_lt(ma, Eigen::NumTraits<T>::lowest()) ? initialize() : logsumexp;
  }
};

}  // namespace functor
}  // namespace tensorflow

#endif  // TENSORFLOW_CORE_KERNELS_SCAN_OPS_H_
apache-2.0
calebTomlinson/Singularity
SingularityS3Base/src/main/java/com/hubspot/singularity/s3/base/config/SingularityS3Credentials.java
1701
package com.hubspot.singularity.s3.base.config;

import static com.hubspot.mesos.JavaUtils.obfuscateValue;

import java.util.Objects;

import org.jets3t.service.security.AWSCredentials;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.hubspot.singularity.runner.base.jackson.Obfuscate;

/**
 * Immutable holder for an S3 access/secret key pair.
 *
 * <p>Values are obfuscated both when serialized (via {@code @Obfuscate}) and
 * in {@link #toString()} so they never appear verbatim in logs.
 */
public class SingularityS3Credentials {

  private final String accessKey;
  private final String secretKey;

  @JsonCreator
  public SingularityS3Credentials(@JsonProperty("accessKey") String accessKey,
                                  @JsonProperty("secretKey") String secretKey) {
    this.accessKey = accessKey;
    this.secretKey = secretKey;
  }

  @Obfuscate
  public String getAccessKey() {
    return accessKey;
  }

  @Obfuscate
  public String getSecretKey() {
    return secretKey;
  }

  /** Adapts this pair to the jets3t credentials type. */
  @JsonIgnore
  public AWSCredentials toAWSCredentials() {
    return new AWSCredentials(accessKey, secretKey);
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    // Strict class check (not instanceof) keeps symmetry with subclasses.
    if (other == null || other.getClass() != getClass()) {
      return false;
    }
    SingularityS3Credentials creds = (SingularityS3Credentials) other;
    return Objects.equals(accessKey, creds.accessKey)
        && Objects.equals(secretKey, creds.secretKey);
  }

  @Override
  public int hashCode() {
    return Objects.hash(accessKey, secretKey);
  }

  @Override
  public String toString() {
    // Keys are obfuscated before formatting; output matches the historical
    // "SingularityS3Credentials[accessKey='...', secretKey='...']" shape.
    return String.format("SingularityS3Credentials[accessKey='%s', secretKey='%s']",
        obfuscateValue(accessKey), obfuscateValue(secretKey));
  }
}
apache-2.0
badmishkallc/PoshOffice
src/System.IO.Packaging/PartBasedPackageProperties.cs
40857
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

//-----------------------------------------------------------------------------
//
// Description:
//  The package properties are a subset of the standard OLE property sets
//  SummaryInformation and DocumentSummaryInformation, and include such properties
//  as Title and Subject.
//
//-----------------------------------------------------------------------------

using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Text;
using System.Xml;

namespace System.IO.Packaging
{
    using Properties;

    /// <summary>
    /// The package properties are a subset of the standard OLE property sets
    /// SummaryInformation and DocumentSummaryInformation, and include such properties
    /// as Title and Subject.
    /// </summary>
    /// <remarks>
    /// <para>Setting a property to null deletes this property. 'null' is never strictly speaking
    /// a property value, but an absence indicator.</para>
    /// </remarks>
    internal class PartBasedPackageProperties : PackageProperties
    {
        //------------------------------------------------------
        //
        //  Constructors
        //
        //------------------------------------------------------

        #region Constructors

        internal PartBasedPackageProperties(Package package)
        {
            _package = package;

            // Initialize literals as Xml Atomic strings.
            _nameTable = PackageXmlStringTable.NameTable;

            ReadPropertyValuesFromPackage();

            // No matter what happens during initialization, the dirty flag should not be set.
            _dirty = false;
        }

        #endregion Constructors

        //------------------------------------------------------
        //
        //  Public Properties
        //
        //------------------------------------------------------

        #region Public Properties

        /// <value>
        /// The primary creator. The identification is environment-specific and
        /// can consist of a name, email address, employee ID, etc. It is
        /// recommended that this value be only as verbose as necessary to
        /// identify the individual.
        /// </value>
        public override string Creator
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Creator);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Creator, value);
            }
        }

        /// <value>
        /// The title.
        /// </value>
        public override string Title
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Title);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Title, value);
            }
        }

        /// <value>
        /// The topic of the contents.
        /// </value>
        public override string Subject
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Subject);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Subject, value);
            }
        }

        /// <value>
        /// The category. This value is typically used by UI applications to create navigation
        /// controls.
        /// </value>
        public override string Category
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Category);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Category, value);
            }
        }

        /// <value>
        /// A delimited set of keywords to support searching and indexing. This
        /// is typically a list of terms that are not available elsewhere in the
        /// properties.
        /// </value>
        public override string Keywords
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Keywords);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Keywords, value);
            }
        }

        /// <value>
        /// The description or abstract of the contents.
        /// </value>
        public override string Description
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Description);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Description, value);
            }
        }

        /// <value>
        /// The type of content represented, generally defined by a specific
        /// use and intended audience. Example values include "Whitepaper",
        /// "Security Bulletin", and "Exam". (This property is distinct from
        /// MIME content types as defined in RFC 2616.)
        /// </value>
        public override string ContentType
        {
            get
            {
                string contentType = GetPropertyValue(PackageXmlEnum.ContentType) as string;
                return contentType;
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.ContentType, value);
            }
        }

        /// <value>
        /// The status of the content. Example values include "Draft",
        /// "Reviewed", and "Final".
        /// </value>
        public override string ContentStatus
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.ContentStatus);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.ContentStatus, value);
            }
        }

        /// <value>
        /// The version number. This value is set by the user or by the application.
        /// </value>
        public override string Version
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Version);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Version, value);
            }
        }

        /// <value>
        /// The revision number. This value indicates the number of saves or
        /// revisions. The application is responsible for updating this value
        /// after each revision.
        /// </value>
        public override string Revision
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Revision);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Revision, value);
            }
        }

        /// <value>
        /// The creation date and time.
        /// </value>
        public override Nullable<DateTime> Created
        {
            get
            {
                return GetDateTimePropertyValue(PackageXmlEnum.Created);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Created, value);
            }
        }

        /// <value>
        /// The date and time of the last modification.
        /// </value>
        public override Nullable<DateTime> Modified
        {
            get
            {
                return GetDateTimePropertyValue(PackageXmlEnum.Modified);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Modified, value);
            }
        }

        /// <value>
        /// The user who performed the last modification. The identification is
        /// environment-specific and can consist of a name, email address,
        /// employee ID, etc. It is recommended that this value be only as
        /// verbose as necessary to identify the individual.
        /// </value>
        public override string LastModifiedBy
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.LastModifiedBy);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.LastModifiedBy, value);
            }
        }

        /// <value>
        /// The date and time of the last printing.
        /// </value>
        public override Nullable<DateTime> LastPrinted
        {
            get
            {
                return GetDateTimePropertyValue(PackageXmlEnum.LastPrinted);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.LastPrinted, value);
            }
        }

        /// <value>
        /// A language of the intellectual content of the resource
        /// </value>
        public override string Language
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Language);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Language, value);
            }
        }

        /// <value>
        /// A unique identifier.
        /// </value>
        public override string Identifier
        {
            get
            {
                return (string)GetPropertyValue(PackageXmlEnum.Identifier);
            }
            set
            {
                RecordNewBinding(PackageXmlEnum.Identifier, value);
            }
        }

        #endregion Public Properties

        //------------------------------------------------------
        //
        //  Internal Methods
        //
        //------------------------------------------------------

        #region Internal Methods

        // Invoked from Package.Flush.
        // The expectation is that whatever is currently dirty will get flushed.
        internal void Flush()
        {
            if (!_dirty)
                return;

            // Make sure there is a part to write to and that it contains
            // the expected start markup.
            EnsureXmlWriter();

            // Write the property elements and clear _dirty.
            SerializeDirtyProperties();

            // add closing markup and close the writer.
            CloseXmlWriter();
        }

        // Invoked from Package.Close.
        internal void Close()
        {
            Flush();
        }

        #endregion Internal Methods

        //------------------------------------------------------
        //
        //  Private Methods
        //
        //------------------------------------------------------

        #region Private Methods

        // The property store is implemented as a hash table of objects.
        // Keys are taken from the set of string constants defined in this
        // class and compared by their references rather than their values.
        private object GetPropertyValue(PackageXmlEnum propertyName)
        {
            _package.ThrowIfWriteOnly();

            // Single dictionary lookup (TryGetValue) instead of
            // ContainsKey followed by the indexer.
            object value;
            if (!_propertyDictionary.TryGetValue(propertyName, out value))
                return null;
            return value;
        }

        // Shim function to adequately cast the result of GetPropertyValue.
        private Nullable<DateTime> GetDateTimePropertyValue(PackageXmlEnum propertyName)
        {
            object valueObject = GetPropertyValue(propertyName);
            if (valueObject == null)
                return null;
            // If an object is there, it will be a DateTime (not a Nullable<DateTime>).
            return (Nullable<DateTime>)valueObject;
        }

        // Set new property value.
        // Override that sets the initializing flag to false to reflect the default
        // situation: recording a binding to implement a value assignment.
        private void RecordNewBinding(PackageXmlEnum propertyenum, object value)
        {
            RecordNewBinding(propertyenum, value, false /* not invoked at construction */, null);
        }

        // Set new property value.
        // Null value is passed for deleting a property.
        // While initializing, we are not assigning new values, and so the dirty flag should
        // stay untouched.
        private void RecordNewBinding(PackageXmlEnum propertyenum, object value, bool initializing, XmlReader reader)
        {
            // If we are reading values from the package, reader cannot be null
            Debug.Assert(!initializing || reader != null);

            if (!initializing)
                _package.ThrowIfReadOnly();

            // Case of an existing property.
            if (_propertyDictionary.ContainsKey(propertyenum))
            {
                // Parsing should detect redundant entries.
                if (initializing)
                {
                    throw new XmlException(Formatter.Format(Resources.DuplicateCorePropertyName, reader.Name),
                        null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
                }
                // Nullable<DateTime> values can be checked against null
                if (value == null) // a deletion
                {
                    _propertyDictionary.Remove(propertyenum);
                }
                else // an update
                {
                    _propertyDictionary[propertyenum] = value;
                }
                // If the binding is an assignment rather than an initialization, set the dirty flag.
                _dirty = !initializing;
            }
            // Case of an initial value being set for a property.
            else
            {
                _propertyDictionary.Add(propertyenum, value);
                // If the binding is an assignment rather than an initialization, set the dirty flag.
                _dirty = !initializing;
            }
        }

        // Initialize object from property values found in package.
        // All values will remain null if the package is not enabled for reading.
        private void ReadPropertyValuesFromPackage()
        {
            Debug.Assert(_propertyPart == null); // This gets called exclusively from constructor.

            // Don't try to read properties from the package it does not have read access
            if (_package.FileOpenAccess == FileAccess.Write)
                return;

            _propertyPart = GetPropertyPart();
            if (_propertyPart == null)
                return;

            ParseCorePropertyPart(_propertyPart);
        }

        // Locate core properties part using the package relationship that points to it.
        private PackagePart GetPropertyPart()
        {
            // Find a package-wide relationship of type CoreDocumentPropertiesRelationshipType.
            PackageRelationship corePropertiesRelationship = GetCorePropertiesRelationship();
            if (corePropertiesRelationship == null)
                return null;

            // Retrieve the part referenced by its target URI.
            if (corePropertiesRelationship.TargetMode != TargetMode.Internal)
                throw new FileFormatException(Resources.NoExternalTargetForMetadataRelationship);

            PackagePart propertiesPart = null;
            Uri propertiesPartUri = PackUriHelper.ResolvePartUri(
                PackUriHelper.PackageRootUri,
                corePropertiesRelationship.TargetUri);

            if (!_package.PartExists(propertiesPartUri))
                throw new FileFormatException(Resources.DanglingMetadataRelationship);

            propertiesPart = _package.GetPart(propertiesPartUri);
            if (!propertiesPart.ValidatedContentType.AreTypeAndSubTypeEqual(s_coreDocumentPropertiesContentType))
            {
                throw new FileFormatException(Resources.WrongContentTypeForPropertyPart);
            }

            return propertiesPart;
        }

        // Find a package-wide relationship of type CoreDocumentPropertiesRelationshipType.
        private PackageRelationship GetCorePropertiesRelationship()
        {
            PackageRelationship propertiesPartRelationship = null;
            foreach (PackageRelationship rel
                in _package.GetRelationshipsByType(CoreDocumentPropertiesRelationshipType))
            {
                if (propertiesPartRelationship != null)
                {
                    throw new FileFormatException(Resources.MoreThanOneMetadataRelationships);
                }
                propertiesPartRelationship = rel;
            }
            return propertiesPartRelationship;
        }

        // Deserialize properties part.
        private void ParseCorePropertyPart(PackagePart part)
        {
            XmlReaderSettings xrs = new XmlReaderSettings();
            xrs.NameTable = _nameTable;
            using (Stream stream = part.GetStream(FileMode.Open, FileAccess.Read))

            // Create a reader that uses _nameTable so as to use the set of tag literals
            // in effect as a set of atomic identifiers.
            using (XmlReader reader = XmlReader.Create(stream, xrs))
            {
                //This method expects the reader to be in ReadState.Initial.
                //It will make the first read call.
                PackagingUtilities.PerformInitailReadAndVerifyEncoding(reader);

                //Note: After the previous method call the reader should be at the first tag in the markup.
                //MoveToContent - Skips over the following - ProcessingInstruction, DocumentType, Comment, Whitespace, or SignificantWhitespace
                //If the reader is currently at a content node then this function call is a no-op
                if (reader.MoveToContent() != XmlNodeType.Element
                    || (object)reader.NamespaceURI != PackageXmlStringTable.GetXmlStringAsObject(PackageXmlEnum.PackageCorePropertiesNamespace)
                    || (object)reader.LocalName != PackageXmlStringTable.GetXmlStringAsObject(PackageXmlEnum.CoreProperties))
                {
                    throw new XmlException(Resources.CorePropertiesElementExpected,
                        null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
                }

                // The schema is closed and defines no attributes on the root element.
                if (PackagingUtilities.GetNonXmlnsAttributeCount(reader) != 0)
                {
                    throw new XmlException(Formatter.Format(Resources.PropertyWrongNumbOfAttribsDefinedOn, reader.Name),
                        null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
                }

                // Iterate through property elements until EOF. Note the proper closing of all
                // open tags is checked by the reader itself.
                // This loop deals only with depth-1 start tags. Handling of element content
                // is delegated to dedicated functions.
                int attributesCount;

                while (reader.Read() && reader.MoveToContent() != XmlNodeType.None)
                {
                    // Ignore end-tags. We check element errors on opening tags.
                    if (reader.NodeType == XmlNodeType.EndElement)
                        continue;

                    // Any content markup that is not an element here is unexpected.
                    if (reader.NodeType != XmlNodeType.Element)
                    {
                        throw new XmlException(Resources.PropertyStartTagExpected,
                            null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
                    }

                    // Any element below the root should open at level 1 exclusively.
                    if (reader.Depth != 1)
                    {
                        throw new XmlException(Resources.NoStructuredContentInsideProperties,
                            null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
                    }

                    attributesCount = PackagingUtilities.GetNonXmlnsAttributeCount(reader);

                    // Property elements can occur in any order (xsd:all).
                    object localName = reader.LocalName;
                    PackageXmlEnum xmlStringIndex = PackageXmlStringTable.GetEnumOf(localName);
                    String valueType = PackageXmlStringTable.GetValueType(xmlStringIndex);

                    if (Array.IndexOf(s_validProperties, xmlStringIndex) == -1)  // An unexpected element is an error.
                    {
                        throw new XmlException(
                            Formatter.Format(Resources.InvalidPropertyNameInCorePropertiesPart, reader.LocalName),
                            null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
                    }

                    // Any element not in the valid core properties namespace is unexpected.
                    // The following is an object comparison, not a string comparison.
                    if ((object)reader.NamespaceURI != PackageXmlStringTable.GetXmlStringAsObject(PackageXmlStringTable.GetXmlNamespace(xmlStringIndex)))
                    {
                        throw new XmlException(Resources.UnknownNamespaceInCorePropertiesPart,
                            null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
                    }

                    if (String.CompareOrdinal(valueType, "String") == 0)
                    {
                        // The schema is closed and defines no attributes on this type of element.
                        if (attributesCount != 0)
                        {
                            throw new XmlException(Formatter.Format(Resources.PropertyWrongNumbOfAttribsDefinedOn, reader.Name),
                                null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
                        }

                        RecordNewBinding(xmlStringIndex, GetStringData(reader), true /*initializing*/, reader);
                    }
                    else if (String.CompareOrdinal(valueType, "DateTime") == 0)
                    {
                        int allowedAttributeCount = (object)reader.NamespaceURI ==
                                                        PackageXmlStringTable.GetXmlStringAsObject(PackageXmlEnum.DublinCoreTermsNamespace)
                                                    ? 1 : 0;

                        // The schema is closed and defines no attributes on this type of element.
                        if (attributesCount != allowedAttributeCount)
                        {
                            throw new XmlException(Formatter.Format(Resources.PropertyWrongNumbOfAttribsDefinedOn, reader.Name),
                                null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
                        }

                        if (allowedAttributeCount != 0)
                        {
                            ValidateXsiType(reader,
                                PackageXmlStringTable.GetXmlStringAsObject(PackageXmlEnum.DublinCoreTermsNamespace),
                                W3cdtf);
                        }

                        RecordNewBinding(xmlStringIndex, GetDateData(reader), true /*initializing*/, reader);
                    }
                    else  // An unexpected element is an error.
                    {
                        Debug.Assert(false, "Unknown value type for properties");
                    }
                }
            }
        }

        // This method validates xsi:type="dcterms:W3CDTF"
        // The valude of xsi:type is a qualified name. It should have a prefix that matches
        //  the xml namespace (ns) within the scope and the name that matches name
        // The comparisons should be case-sensitive comparisons
        internal static void ValidateXsiType(XmlReader reader, Object ns, string name)
        {
            // Get the value of xsi;type
            String typeValue = reader.GetAttribute(PackageXmlStringTable.GetXmlString(PackageXmlEnum.Type),
                PackageXmlStringTable.GetXmlString(PackageXmlEnum.XmlSchemaInstanceNamespace));

            // Missing xsi:type
            if (typeValue == null)
            {
                throw new XmlException(Formatter.Format(Resources.UnknownDCDateTimeXsiType, reader.Name),
                    null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
            }

            int index = typeValue.IndexOf(':');

            // The valude of xsi:type is not a qualified name
            if (index == -1)
            {
                throw new XmlException(Formatter.Format(Resources.UnknownDCDateTimeXsiType, reader.Name),
                    null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
            }

            // Check the following conditions
            //  The namespace of the prefix (string before ":") matches "ns"
            //  The name (string after ":") matches "name"
            if (!ReferenceEquals(ns, reader.LookupNamespace(typeValue.Substring(0, index)))
                    || String.CompareOrdinal(name, typeValue.Substring(index + 1, typeValue.Length - index - 1)) != 0)
            {
                throw new XmlException(Formatter.Format(Resources.UnknownDCDateTimeXsiType, reader.Name),
                    null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
            }
        }

        // Expect to find text data and return its value.
        private string GetStringData(XmlReader reader)
        {
            if (reader.IsEmptyElement)
                return string.Empty;

            reader.Read();
            if (reader.MoveToContent() == XmlNodeType.EndElement)
                return string.Empty;

            // If there is any content in the element, it should be text content and nothing else.
            if (reader.NodeType != XmlNodeType.Text)
            {
                throw new XmlException(Resources.NoStructuredContentInsideProperties,
                    null, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
            }

            return reader.Value;
        }

        // Expect to find text data and return its value as DateTime.
        private Nullable<DateTime> GetDateData(XmlReader reader)
        {
            string data = GetStringData(reader);
            DateTime dateTime;

            try
            {
                // Note: No more than 7 second decimals are accepted by the
                // list of formats given. There currently is no method that
                // would perform XSD-compliant parsing.
                dateTime = DateTime.ParseExact(data, s_dateTimeFormats, CultureInfo.InvariantCulture, DateTimeStyles.None);
            }
            catch (FormatException exc)
            {
                throw new XmlException(Resources.XsdDateTimeExpected,
                    exc, ((IXmlLineInfo)reader).LineNumber, ((IXmlLineInfo)reader).LinePosition);
            }
            return dateTime;
        }

        // Make sure there is a part to write to and that it contains
        // the expected start markup.
        private void EnsureXmlWriter()
        {
            if (_xmlWriter != null)
                return;

            EnsurePropertyPart(); // Should succeed or throw an exception.

            Stream writerStream = new IgnoreFlushAndCloseStream(_propertyPart.GetStream(FileMode.Create, FileAccess.Write));
            _xmlWriter = XmlWriter.Create(writerStream, new XmlWriterSettings { Encoding = Encoding.UTF8 });
            WriteXmlStartTagsForPackageProperties();
        }

        // Create a property part if none exists yet.
        private void EnsurePropertyPart()
        {
            if (_propertyPart != null)
                return;

            // If _propertyPart is null, no property part existed when this object was created,
            // and this function is being called for the first time.
            // However, when read access is available, we can afford the luxury of checking whether
            // a property part and its referring relationship got correctly created in the meantime
            // outside of this class.
            // In write-only mode, it is impossible to perform this check, and the external creation
            // scenario will result in an exception being thrown.
            if (_package.FileOpenAccess == FileAccess.Read || _package.FileOpenAccess == FileAccess.ReadWrite)
            {
                _propertyPart = GetPropertyPart();
                if (_propertyPart != null)
                    return;
            }

            CreatePropertyPart();
        }

        // Create a new property relationship pointing to a new property part.
        // If only this class is used for manipulating property relationships, there cannot be a
        // pre-existing dangling property relationship.
        // No check is performed here for other classes getting misused insofar as this function
        // has to work in write-only mode.
        private void CreatePropertyPart()
        {
            _propertyPart = _package.CreatePart(GeneratePropertyPartUri(), s_coreDocumentPropertiesContentType.ToString());
            _package.CreateRelationship(_propertyPart.Uri, TargetMode.Internal,
                CoreDocumentPropertiesRelationshipType);
        }

        private Uri GeneratePropertyPartUri()
        {
            string propertyPartName = DefaultPropertyPartNamePrefix
                + Guid.NewGuid().ToString(GuidStorageFormatString)
                + DefaultPropertyPartNameExtension;

            return PackUriHelper.CreatePartUri(new Uri(propertyPartName, UriKind.Relative));
        }

        private void WriteXmlStartTagsForPackageProperties()
        {
            _xmlWriter.WriteStartDocument();

            // <coreProperties
            _xmlWriter.WriteStartElement(PackageXmlStringTable.GetXmlString(PackageXmlEnum.CoreProperties),       // local name
                PackageXmlStringTable.GetXmlString(PackageXmlEnum.PackageCorePropertiesNamespace));               // namespace

            // xmlns:dc
            _xmlWriter.WriteAttributeString(PackageXmlStringTable.GetXmlString(PackageXmlEnum.XmlNamespacePrefix),
                PackageXmlStringTable.GetXmlString(PackageXmlEnum.DublinCorePropertiesNamespacePrefix),
                null,
                PackageXmlStringTable.GetXmlString(PackageXmlEnum.DublinCorePropertiesNamespace));

            // xmlns:dcterms
            _xmlWriter.WriteAttributeString(PackageXmlStringTable.GetXmlString(PackageXmlEnum.XmlNamespacePrefix),
                PackageXmlStringTable.GetXmlString(PackageXmlEnum.DublincCoreTermsNamespacePrefix),
                null,
                PackageXmlStringTable.GetXmlString(PackageXmlEnum.DublinCoreTermsNamespace));

            // xmlns:xsi
            _xmlWriter.WriteAttributeString(PackageXmlStringTable.GetXmlString(PackageXmlEnum.XmlNamespacePrefix),
                PackageXmlStringTable.GetXmlString(PackageXmlEnum.XmlSchemaInstanceNamespacePrefix),
                null,
                PackageXmlStringTable.GetXmlString(PackageXmlEnum.XmlSchemaInstanceNamespace));
        }

        // Write the property elements and clear _dirty.
        private void SerializeDirtyProperties()
        {
            // Create a property element for each non-null entry.
            foreach (KeyValuePair<PackageXmlEnum, Object> entry in _propertyDictionary)
            {
                Debug.Assert(entry.Value != null);

                PackageXmlEnum propertyNamespace = PackageXmlStringTable.GetXmlNamespace(entry.Key);

                _xmlWriter.WriteStartElement(PackageXmlStringTable.GetXmlString(entry.Key),
                    PackageXmlStringTable.GetXmlString(propertyNamespace));

                if (entry.Value is Nullable<DateTime>)
                {
                    if (propertyNamespace == PackageXmlEnum.DublinCoreTermsNamespace)
                    {
                        // xsi:type=
                        _xmlWriter.WriteStartAttribute(PackageXmlStringTable.GetXmlString(PackageXmlEnum.Type),
                            PackageXmlStringTable.GetXmlString(PackageXmlEnum.XmlSchemaInstanceNamespace));

                        // "dcterms:W3CDTF"
                        _xmlWriter.WriteQualifiedName(W3cdtf,
                            PackageXmlStringTable.GetXmlString(PackageXmlEnum.DublinCoreTermsNamespace));

                        _xmlWriter.WriteEndAttribute();
                    }

                    // Use sortable ISO 8601 date/time pattern. Include second fractions down to the 100-nanosecond interval,
                    // which is the definition of a "tick" for the DateTime type.
                    _xmlWriter.WriteString(XmlConvert.ToString(((Nullable<DateTime>)entry.Value).Value.ToUniversalTime(), "yyyy-MM-ddTHH:mm:ss.fffffffZ"));
                }
                else
                {
                    // The following uses the fact that ToString is virtual.
                    _xmlWriter.WriteString(entry.Value.ToString());
                }

                _xmlWriter.WriteEndElement();
            }

            // Mark properties as saved.
            _dirty = false;
        }

        // Add end markup and close the writer.
        private void CloseXmlWriter()
        {
            // Close the root element.
            _xmlWriter.WriteEndElement();

            // Close the writer itself.
            _xmlWriter.Dispose();

            // Make sure we know it's closed.
            _xmlWriter = null;
        }

        #endregion Private Methods

        //------------------------------------------------------
        //
        //  Private fields
        //
        //------------------------------------------------------

        #region Private Fields

        private Package _package;
        private PackagePart _propertyPart;
        private XmlWriter _xmlWriter;

        // Table of objects from the closed set of literals defined below.
        // Keys are taken from the set of string constants defined in this
        // class and compared by their references rather than their values.
        private const int NumCoreProperties = 16;
        private Dictionary<PackageXmlEnum, Object> _propertyDictionary = new Dictionary<PackageXmlEnum, Object>(NumCoreProperties);
        private bool _dirty = false;

        // This System.Xml.NameTable makes sure that we use the same references to strings
        // throughout (including when parsing Xml) and so can perform reference comparisons
        // rather than value comparisons.
        private NameTable _nameTable;

        // Literals.
        private static readonly ContentType s_coreDocumentPropertiesContentType
            = new ContentType("application/vnd.openxmlformats-package.core-properties+xml");
        private const string CoreDocumentPropertiesRelationshipType
            = "http://schemas.openxmlformats.org/package/2006/relationships/metadata/core-properties";
        private const string DefaultPropertyPartNamePrefix =
            "/package/services/metadata/core-properties/";
        private const string W3cdtf = "W3CDTF";
        private const string DefaultPropertyPartNameExtension = ".psmdcp";
        private const string GuidStorageFormatString = @"N";     // N - simple format without adornments

        private static PackageXmlEnum[] s_validProperties = new PackageXmlEnum[] {
                                                                PackageXmlEnum.Creator,
                                                                PackageXmlEnum.Identifier,
                                                                PackageXmlEnum.Title,
                                                                PackageXmlEnum.Subject,
                                                                PackageXmlEnum.Description,
                                                                PackageXmlEnum.Language,
                                                                PackageXmlEnum.Created,
                                                                PackageXmlEnum.Modified,
                                                                PackageXmlEnum.ContentType,
                                                                PackageXmlEnum.Keywords,
                                                                PackageXmlEnum.Category,
                                                                PackageXmlEnum.Version,
                                                                PackageXmlEnum.LastModifiedBy,
                                                                PackageXmlEnum.ContentStatus,
                                                                PackageXmlEnum.Revision,
                                                                PackageXmlEnum.LastPrinted
                                                                };

        // Array of formats to supply to XmlConvert.ToDateTime or DateTime.ParseExact.
        // xsd:DateTime requires full date time in sortable (ISO 8601) format.
        // It can be expressed in local time, universal time (Z), or relative to universal time (zzz).
        // Negative years are accepted.
        // IMPORTANT: Second fractions are recognized only down to 1 tenth of a microsecond because this is the resolution
        // of the DateTime type. The Xml standard, however, allows any number of decimals; but XmlConvert only offers
        // this very awkward API with an explicit pattern enumeration.
        // NOTE(fix): the first entry of the one-fractional-digit group previously read
        // "yyyy-MM-ddTHH:mm:ss.ff", duplicating the two-digit form and leaving local times
        // with a single fraction digit (e.g. "2010-06-12T10:00:00.5") unparseable; it now
        // correctly uses ".f", matching its "Z"/"zzz" siblings in the same group.
        private static readonly string[] s_dateTimeFormats = new string[] {
            "yyyy-MM-ddTHH:mm:ss", "yyyy-MM-ddTHH:mm:ssZ", "yyyy-MM-ddTHH:mm:sszzz", @"\-yyyy-MM-ddTHH:mm:ss", @"\-yyyy-MM-ddTHH:mm:ssZ", @"\-yyyy-MM-ddTHH:mm:sszzz",
            "yyyy-MM-ddTHH:mm:ss.f", "yyyy-MM-ddTHH:mm:ss.fZ", "yyyy-MM-ddTHH:mm:ss.fzzz", @"\-yyyy-MM-ddTHH:mm:ss.f", @"\-yyyy-MM-ddTHH:mm:ss.fZ", @"\-yyyy-MM-ddTHH:mm:ss.fzzz",
            "yyyy-MM-ddTHH:mm:ss.ff", "yyyy-MM-ddTHH:mm:ss.ffZ", "yyyy-MM-ddTHH:mm:ss.ffzzz", @"\-yyyy-MM-ddTHH:mm:ss.ff", @"\-yyyy-MM-ddTHH:mm:ss.ffZ", @"\-yyyy-MM-ddTHH:mm:ss.ffzzz",
            "yyyy-MM-ddTHH:mm:ss.fff", "yyyy-MM-ddTHH:mm:ss.fffZ", "yyyy-MM-ddTHH:mm:ss.fffzzz", @"\-yyyy-MM-ddTHH:mm:ss.fff", @"\-yyyy-MM-ddTHH:mm:ss.fffZ", @"\-yyyy-MM-ddTHH:mm:ss.fffzzz",
            "yyyy-MM-ddTHH:mm:ss.ffff", "yyyy-MM-ddTHH:mm:ss.ffffZ", "yyyy-MM-ddTHH:mm:ss.ffffzzz", @"\-yyyy-MM-ddTHH:mm:ss.ffff", @"\-yyyy-MM-ddTHH:mm:ss.ffffZ", @"\-yyyy-MM-ddTHH:mm:ss.ffffzzz",
            "yyyy-MM-ddTHH:mm:ss.fffff", "yyyy-MM-ddTHH:mm:ss.fffffZ", "yyyy-MM-ddTHH:mm:ss.fffffzzz", @"\-yyyy-MM-ddTHH:mm:ss.fffff", @"\-yyyy-MM-ddTHH:mm:ss.fffffZ", @"\-yyyy-MM-ddTHH:mm:ss.fffffzzz",
            "yyyy-MM-ddTHH:mm:ss.ffffff", "yyyy-MM-ddTHH:mm:ss.ffffffZ", "yyyy-MM-ddTHH:mm:ss.ffffffzzz", @"\-yyyy-MM-ddTHH:mm:ss.ffffff", @"\-yyyy-MM-ddTHH:mm:ss.ffffffZ", @"\-yyyy-MM-ddTHH:mm:ss.ffffffzzz",
            "yyyy-MM-ddTHH:mm:ss.fffffff", "yyyy-MM-ddTHH:mm:ss.fffffffZ", "yyyy-MM-ddTHH:mm:ss.fffffffzzz", @"\-yyyy-MM-ddTHH:mm:ss.fffffff", @"\-yyyy-MM-ddTHH:mm:ss.fffffffZ", @"\-yyyy-MM-ddTHH:mm:ss.fffffffzzz",
        };

        #endregion Private Fields
    }
}
artistic-2.0
gitpan/Pony-Object
t/ignore/Object/Animal/Cow.pm
749
package Object::Animal::Cow;

# A concrete cow: an artiodactyl that also fulfils the ICow interface.
use Pony::Object qw(Object::Animal::Artiodactyls Object::Animal::ICow );

    # Internal state: species tag, the sound a cow makes, and milking
    # bookkeeping used by the yield calculation below.
    private type => 'cow';
    protected word => 'moo';
    protected yieldCount => 0;   # number of completed milking sessions
    private milkFactor => 5;     # yield contributed per milking session

    # Leg count as inherited from the artiodactyl base class.
    sub getLegsCount : Public
        {
            my $this = shift;
            return $this->legs;
        }

    # Milk the cow once: bumps the session counter (and, like the original
    # one-liner, evaluates to the freshly incremented count).
    sub getMilk : Public
        {
            my $this = shift;
            return ++$this->yieldCount;
        }

    # Total milk produced so far: per-session factor times session count.
    sub getYieldOfMilk : Public
        {
            my $this = shift;
            return $this->calcYield( $this->milkFactor, $this->yieldCount );
        }

    # Internal helper: plain multiplication, kept protected on purpose.
    # (Operands renamed from $a/$b, which are Perl's special sort variables.)
    sub calcYield : Protected
        {
            my $this = shift;
            my ( $factor, $count ) = @_;
            return $factor * $count;
        }

1;
artistic-2.0
TizenChameleon/uifw-evas
src/modules/engines/software_8_x11/evas_engine.c
16977
#include "evas_common.h"
#include "evas_private.h"
#include "evas_engine.h"
#include "Evas_Engine_Software_8_X11.h"
#include "evas_common_soft8.h"
#include <pixman.h>

/* Log domain for this engine module; registered in module_open(). */
int _evas_engine_soft8_x11_log_dom = -1;

/* function tables - filled in later (func and parent func) */
static Evas_Func func, pfunc;

/* engine struct data */
/* Per-output engine state: the X connection/target, an 8bpp palette,
 * output geometry and rotation, the tile-based dirty-region tracker,
 * the shared-memory output buffer and a temporary image used when the
 * output is rotated. */
typedef struct _Render_Engine Render_Engine;

struct _Render_Engine
{
   xcb_connection_t *connection;
   xcb_drawable_t drawable;
   xcb_screen_t *screen;
   unsigned char pal[256];      /* greyscale pixel values allocated from the default colormap */
   int depth;
   int w, h, rot;               /* output size and rotation (0/90/180/270) */
   Tilebuf *tb;                 /* dirty-tile tracker */
   Tilebuf_Rect *rects;         /* current list of rects to redraw */
   Tilebuf_Rect *cur_rect;      /* iterator over 'rects' */
   X_Output_Buffer *shbuf;      /* (possibly SHM) output buffer */
   Soft8_Image *tmp_out;        /* used by indirect render, like rotation */
   pixman_region16_t *clip_rects; /* region accumulated between push and flush */
   unsigned char end : 1;       /* set when rect iteration has finished */
   unsigned char shm : 1;       /* display supports MIT-SHM */
};

/* prototypes we will use here */
static void *eng_info(Evas * e);
static void eng_info_free(Evas * e, void *info);
static int eng_setup(Evas * e, void *info);
static void eng_output_free(void *data);
static void eng_output_resize(void *data, int w, int h);
static void eng_output_tile_size_set(void *data, int w, int h);
static void eng_output_redraws_rect_add(void *data, int x, int y, int w, int h);
static void eng_output_redraws_rect_del(void *data, int x, int y, int w, int h);
static void eng_output_redraws_clear(void *data);
static void *eng_output_redraws_next_update_get(void *data, int *x, int *y,
                                                int *w, int *h, int *cx,
                                                int *cy, int *cw, int *ch);
static void eng_output_redraws_next_update_push(void *data, void *surface,
                                                int x, int y, int w, int h);
static void eng_output_flush(void *data);
static void eng_output_idle_flush(void *data);

/* engine api this module provides */

/* Allocate and return a fresh engine-info struct for this engine. */
static void *
eng_info(Evas * e)
{
   Evas_Engine_Info_Software_8_X11 *info;
   info = calloc(1, sizeof(Evas_Engine_Info_Software_8_X11));
   if (!info)
      return NULL;
   info->magic.magic = rand();
   info->render_mode = EVAS_RENDER_MODE_BLOCKING;
   return info;
   /* NOTE(review): unreachable - presumably left to mark 'e' as consumed
    * (it is otherwise unused); harmless dead code. */
   e = NULL;
}

/* Free an engine-info struct previously returned by eng_info(). */
static void
eng_info_free(Evas * e __UNUSED__, void *info)
{
   Evas_Engine_Info_Software_8_X11 *in;
   in = (Evas_Engine_Info_Software_8_X11 *) info;
   free(in);
}

/* Make sure re->tmp_out (the intermediate image used for rotated output)
 * is at least as large as the biggest pending redraw rect; reallocate it
 * if it is too small. */
static void
_tmp_out_alloc(Render_Engine * re)
{
   Tilebuf_Rect *r;
   unsigned int w = 0, h = 0;

   /* find the largest pending rect - tmp_out must fit any of them */
   EINA_INLIST_FOREACH(re->rects, r)
   {
      if (r->w > (int)w)
         w = r->w;
      if (r->h > (int)h)
         h = r->h;
   }

   if (re->tmp_out)
     {
        if ((re->tmp_out->cache_entry.w < w) || (re->tmp_out->cache_entry.h < h))
          {
             evas_cache_image_drop(&re->tmp_out->cache_entry);
             re->tmp_out = NULL;
          }
     }

   if (!re->tmp_out)
     {
        Soft8_Image *im;

        im = (Soft8_Image *) evas_cache_image_empty(evas_common_soft8_image_cache_get());
        im->cache_entry.flags.alpha = 0;
        evas_cache_image_surface_alloc(&im->cache_entry, w, h);
        re->tmp_out = im;
     }
}

/* Populate re->pal with 256 greyscale pixel values allocated from the
 * screen's default colormap. Requests are issued first, replies read in a
 * second pass, to pipeline the round trips. */
static void
alloc_colors(Render_Engine * re)
{
   xcb_alloc_color_cookie_t color_rq[256];
   xcb_alloc_color_reply_t *rep;
   xcb_colormap_t colormap = re->screen->default_colormap;
   int i;

   for (i = 0; i < 256; i++)
      color_rq[i] = xcb_alloc_color(re->connection, colormap, i << 8, i << 8, i << 8);
   for (i = 0; i < 256; i++)
     {
        /* NOTE(review): xcb_alloc_color_reply() can return NULL on failure;
         * the reply is dereferenced unchecked here - confirm upstream. */
        rep = xcb_alloc_color_reply(re->connection, color_rq[i], NULL);
        re->pal[i] = rep->pixel;
        free(rep);
     }
}

/* First-time setup or re-configuration of the engine for canvas 'e' from
 * the user-supplied engine info. Returns 1 on success, 0 on failure. */
static int
eng_setup(Evas * e, void *in)
{
   Render_Engine *re;
   Evas_Engine_Info_Software_8_X11 *info;
   /* X_Output_Buffer *xob; */

   info = (Evas_Engine_Info_Software_8_X11 *) in;
   if (!e->engine.data.output)
     {
        /* the only check - simplistic, i know, but enough for this
         * "special purpose" engine. Remember it is meant to be used
         * for limited power devices that have a 8bit display mode
         * and no real other acceleration, and high resolution so we
         * can pre-dither into 8bpp.
         */
//      if (DefaultDepth(info->info.display,
//                       DefaultScreen(info->info.display)) != 8)
//        return;
        /* do common routine init - we wil at least use it for core
         * image loading and font loading/glyph rendering & placement */
        evas_common_cpu_init();
        evas_common_blend_init();
        evas_common_image_init();
        evas_common_convert_init();
        evas_common_scale_init();
        evas_common_rectangle_init();
        evas_common_polygon_init();
        evas_common_line_init();
        evas_common_font_init();
        evas_common_draw_init();
        evas_common_tilebuf_init();
        evas_common_soft8_image_init();

        /* render engine specific data */
        re = calloc(1, sizeof(Render_Engine));
        if (!re)
           return 0;
        e->engine.data.output = re;
        re->connection = info->info.connection;
        re->screen = info->info.screen;
        re->drawable = info->info.drawable;
        re->depth = info->info.depth;
        re->w = e->output.w;
        re->h = e->output.h;
        re->rot = info->info.rotation;
        re->tb = evas_common_tilebuf_new(e->output.w, e->output.h);
        if (re->tb)
           evas_common_tilebuf_set_tile_size(re->tb, TILESIZE, TILESIZE);
        alloc_colors(re);
     }
   else
     {
        /* we changed the info after first init - do a re-eval where
         * appropriate */
        re = e->engine.data.output;
        if (re->tb)
           evas_common_tilebuf_free(re->tb);
        re->connection = info->info.connection;
        re->screen = info->info.screen;
        re->drawable = info->info.drawable;
        re->w = e->output.w;
        re->h = e->output.h;
        re->rot = info->info.rotation;
        re->tb = evas_common_tilebuf_new(e->output.w, e->output.h);
        if (re->tb)
           evas_common_tilebuf_set_tile_size(re->tb, TILESIZE, TILESIZE);
        if (re->tmp_out)
          {
             evas_cache_image_drop(&re->tmp_out->cache_entry);
             re->tmp_out = NULL;
          }
        alloc_colors(re);
     }
   if (!e->engine.data.output)
      return 0;
   /* add a draw context if we dont have one */
   if (!e->engine.data.context)
      e->engine.data.context = e->engine.func->context_new(e->engine.data.output);

   /* check if the display can do shm */
   re->shm = evas_software_8_x11_x_can_do_shm(re->connection, re->screen);

   return 1;
}

/* Tear down all engine state for this output and shut down the shared
 * subsystems initialized in eng_setup(). */
static void
eng_output_free(void *data)
{
   Render_Engine *re;

   re = (Render_Engine *) data;
   if (re->shbuf)
      evas_software_8_x11_x_output_buffer_free(re->shbuf, 0);
   if (re->clip_rects)
     {
        pixman_region_fini(re->clip_rects);
        free(re->clip_rects);
        re->clip_rects = NULL;
     }
   if (re->tb)
      evas_common_tilebuf_free(re->tb);
   if (re->rects)
      evas_common_tilebuf_free_render_rects(re->rects);
   if (re->tmp_out)
      evas_cache_image_drop(&re->tmp_out->cache_entry);
   free(re);

   evas_common_font_shutdown();
   evas_common_image_shutdown();
   evas_common_soft8_image_shutdown();
}

/* Resize the output: rebuild the tile buffer for the new geometry and drop
 * buffers whose size no longer matches (they are lazily re-created). */
static void
eng_output_resize(void *data, int w, int h)
{
   Render_Engine *re;

   re = (Render_Engine *) data;
   if ((re->w == w) && (re->h == h))
      return;

   evas_common_tilebuf_free(re->tb);
   re->w = w;
   re->h = h;
   re->tb = evas_common_tilebuf_new(w, h);
   if (re->tb)
      evas_common_tilebuf_set_tile_size(re->tb, TILESIZE, TILESIZE);

   if (re->shbuf)
     {
        evas_software_8_x11_x_output_buffer_free(re->shbuf, 0);
        re->shbuf = NULL;
     }

   if (re->clip_rects)
     {
        pixman_region_fini(re->clip_rects);
        free(re->clip_rects);
        re->clip_rects = NULL;
     }

   if (re->tmp_out)
     {
        evas_cache_image_drop(&re->tmp_out->cache_entry);
        re->tmp_out = NULL;
     }
}

/* Forward the requested tile size to the tile buffer. */
static void
eng_output_tile_size_set(void *data, int w, int h)
{
   Render_Engine *re;

   re = (Render_Engine *) data;
   evas_common_tilebuf_set_tile_size(re->tb, w, h);
}

/* Mark a rectangle as needing redraw. */
static void
eng_output_redraws_rect_add(void *data, int x, int y, int w, int h)
{
   Render_Engine *re;

   re = (Render_Engine *) data;
   evas_common_tilebuf_add_redraw(re->tb, x, y, w, h);
}

/* Remove a rectangle from the pending-redraw set. */
static void
eng_output_redraws_rect_del(void *data, int x, int y, int w, int h)
{
   Render_Engine *re;

   re = (Render_Engine *) data;
   evas_common_tilebuf_del_redraw(re->tb, x, y, w, h);
}

/* Clear all pending redraw rectangles. */
static void
eng_output_redraws_clear(void *data)
{
   Render_Engine *re;

   re = (Render_Engine *) data;
   evas_common_tilebuf_clear(re->tb);
}

/* Lazily create the output buffer, sized for the rotation (width/height are
 * swapped for 90/270), and give it a fresh GC bound to our drawable. */
static inline void
_output_buffer_alloc(Render_Engine * re)
{
   int w, h;
   if (re->shbuf)
      return;

   if ((re->rot == 0) || (re->rot == 180))
     {
        w = re->w;
        h = re->h;
     }
   else
     {
        w = re->h;
        h = re->w;
     }

   re->shbuf = evas_software_8_x11_x_output_buffer_new
       (re->connection, re->screen, re->depth, re->pal, w, h, 1, NULL);

   re->shbuf->drawable = re->drawable;
   re->shbuf->screen = re->screen;

   /* replace any GC the buffer came with by one tied to our drawable */
   if (re->shbuf->gc)
     {
        xcb_free_gc(re->shbuf->connection, re->shbuf->gc);
        re->shbuf->gc = 0;
     }

   re->shbuf->gc = xcb_generate_id(re->shbuf->connection);
   xcb_create_gc(re->shbuf->connection, re->shbuf->gc, re->shbuf->drawable, 0, NULL);
}

/* Iterator over the pending redraw rects. Each call returns the surface to
 * render the next rect into (the shared buffer at rot==0, tmp_out
 * otherwise) and reports both the rect (x,y,w,h) and the clip within the
 * returned surface (cx,cy,cw,ch). Returns NULL when iteration is done. */
static void *
eng_output_redraws_next_update_get(void *data, int *x, int *y, int *w, int *h,
                                   int *cx, int *cy, int *cw, int *ch)
{
   Render_Engine *re;
   Tilebuf_Rect *rect;
   int ux, uy, uw, uh;

   re = (Render_Engine *) data;
   if (re->end)
     {
        re->end = 0;
        return NULL;
     }
   if (!re->rects)
     {
        re->rects = evas_common_tilebuf_get_render_rects(re->tb);
        if (!re->rects)
           return NULL;
        re->cur_rect = re->rects;
        _output_buffer_alloc(re);
        if (re->rot != 0)
           _tmp_out_alloc(re);  /* grows if required */
     }

   if (!re->cur_rect)
     {
        if (re->rects)
           evas_common_tilebuf_free_render_rects(re->rects);
        re->rects = NULL;
        return NULL;
     }

   rect = re->cur_rect;
   ux = rect->x;
   uy = rect->y;
   uw = rect->w;
   uh = rect->h;
   re->cur_rect = (Tilebuf_Rect *) ((EINA_INLIST_GET(re->cur_rect))->next);
   if (!re->cur_rect)
     {
        evas_common_tilebuf_free_render_rects(re->rects);
        re->rects = NULL;
        re->end = 1;  /* next call returns NULL, then iteration can restart */
     }

   *x = ux;
   *y = uy;
   *w = uw;
   *h = uh;

   if (re->rot == 0)
     {
        *cx = ux;
        *cy = uy;
        *cw = uw;
        *ch = uh;
        return re->shbuf->im;
     }
   else
     {
        *cx = 0;
        *cy = 0;
        *cw = uw;
        *ch = uh;
        return re->tmp_out;
     }
}

/* Copy 'src' into 'dst' rotated 90 degrees, placing the result at
 * (out_x, out_y) in destination coordinates. w/h are source dimensions. */
static void
_blit_rot_90(Soft8_Image * dst, const Soft8_Image * src,
             int out_x, int out_y, int w, int h)
{
   DATA8 *dp, *sp;
   int x, y;

   sp = src->pixels;
   dp = dst->pixels + (out_x + (w + out_y - 1) * dst->stride);

   for (y = 0; y < h; y++)
     {
        DATA8 *dp_itr, *sp_itr;

        sp_itr = sp;
        dp_itr = dp;

        for (x = 0; x < w; x++)
          {
             *dp_itr = *sp_itr;

             sp_itr++;
             dp_itr -= dst->stride;  /* walk up a destination column */
          }
        sp += src->stride;
        dp++;
     }
}

/* As _blit_rot_90, but rotated 180 degrees. */
static void
_blit_rot_180(Soft8_Image * dst, const Soft8_Image * src,
              int out_x, int out_y, int w, int h)
{
   DATA8 *dp, *sp;
   int x, y;

   sp = src->pixels;
   dp = dst->pixels + ((w + out_x - 1) + (h + out_y - 1) * dst->stride);

   for (y = 0; y < h; y++)
     {
        DATA8 *dp_itr, *sp_itr;

        sp_itr = sp;
        dp_itr = dp;

        for (x = 0; x < w; x++)
          {
             *dp_itr = *sp_itr;

             sp_itr++;
             dp_itr--;  /* destination scans right-to-left */
          }
        sp += src->stride;
        dp -= dst->stride;
     }
}

/* As _blit_rot_90, but rotated 270 degrees. */
static void
_blit_rot_270(Soft8_Image * dst, const Soft8_Image * src,
              int out_x, int out_y, int w, int h)
{
   DATA8 *dp, *sp;
   int x, y;

   sp = src->pixels;
   dp = dst->pixels + ((h + out_x - 1) + out_y * dst->stride);

   for (y = 0; y < h; y++)
     {
        DATA8 *dp_itr, *sp_itr;

        sp_itr = sp;
        dp_itr = dp;

        for (x = 0; x < w; x++)
          {
             *dp_itr = *sp_itr;

             sp_itr++;
             dp_itr += dst->stride;  /* walk down a destination column */
          }
        sp += src->stride;
        dp--;
     }
}

/* Copy the rendered tmp_out image into the output buffer, applying the
 * configured rotation. (out_x, out_y) is the destination position; w/h is
 * the source size. No-op for empty or fully out-of-bounds destinations. */
static void
_tmp_out_process(Render_Engine * re, int out_x, int out_y, int w, int h)
{
   Soft8_Image *d, *s;

   d = re->shbuf->im;
   s = re->tmp_out;

   if ((w < 1) || (h < 1) ||
       (out_x >= (int)d->cache_entry.w) || (out_y >= (int)d->cache_entry.h))
      return;

   if (re->rot == 90)
      _blit_rot_90(d, s, out_x, out_y, w, h);
   else if (re->rot == 180)
      _blit_rot_180(d, s, out_x, out_y, w, h);
   else if (re->rot == 270)
      _blit_rot_270(d, s, out_x, out_y, w, h);
}

/* Accept a finished update rect: translate it into output-buffer
 * coordinates for the current rotation, blit tmp_out if rotated, and add
 * it to the clip region flushed to X by eng_output_flush(). */
static void
eng_output_redraws_next_update_push(void *data, void *surface __UNUSED__,
                                    int x, int y, int w, int h)
{
   Render_Engine *re;
   struct
   {
      int x, y, width, height;
   } r = { 0, 0, 0, 0 };

   re = (Render_Engine *) data;

   if (!re->clip_rects)
     {
        re->clip_rects = (pixman_region16_t *) malloc(sizeof(pixman_region16_t));
        pixman_region_init(re->clip_rects);
     }

   /* map the rect from canvas coordinates to rotated output coordinates */
   if (re->rot == 0)
     {
        r.x = x;
        r.y = y;
        r.width = w;
        r.height = h;
     }
   else if (re->rot == 90)
     {
        r.x = y;
        r.y = re->w - w - x;
        r.width = h;
        r.height = w;
     }
   else if (re->rot == 180)
     {
        r.x = re->w - w - x;
        r.y = re->h - h - y;
        r.width = w;
        r.height = h;
     }
   else if (re->rot == 270)
     {
        r.x = re->h - h - y;
        r.y = x;
        r.width = h;
        r.height = w;
     }

   if (re->rot != 0)
      _tmp_out_process(re, r.x, r.y, w, h);
   pixman_region_union_rect(re->clip_rects, re->clip_rects,
                            r.x, r.y, r.width, r.height);
}

/* Paste every accumulated clip rect from the output buffer to the X
 * drawable, then reset the clip region for the next frame. */
static void
eng_output_flush(void *data)
{
   Render_Engine *re;

   re = (Render_Engine *) data;
   if (re->clip_rects)
     {
        re->shbuf->drawable = re->drawable;
        {
           int i;
           pixman_box16_t *rects = pixman_region_rectangles(re->clip_rects, NULL);
           for (i = 0; i < pixman_region_n_rects(re->clip_rects); i++, rects++)
              evas_software_8_x11_x_output_buffer_paste
                  (re->shbuf, re->drawable, re->shbuf->gc,
                   rects->x1, rects->y1,
                   rects->x2 - rects->x1, rects->y2 - rects->y1, 1);
        }
        pixman_region_fini(re->clip_rects);
        free(re->clip_rects);
        re->clip_rects = NULL;
     }
}

/* Free lazily-recreatable buffers while the canvas is idle. */
static void
eng_output_idle_flush(void *data)
{
   Render_Engine *re;

   re = (Render_Engine *) data;
   if (re->shbuf)
     {
        evas_software_8_x11_x_output_buffer_free(re->shbuf, 0);
        re->shbuf = NULL;
     }
   if (re->clip_rects)
     {
        pixman_region_fini(re->clip_rects);
        free(re->clip_rects);
        re->clip_rects = NULL;
     }
   if (re->tmp_out)
     {
        evas_cache_image_drop(&re->tmp_out->cache_entry);
        re->tmp_out = NULL;
     }
}

/* This 8bpp engine never renders with an alpha channel. */
static Eina_Bool
eng_canvas_alpha_get(void *data __UNUSED__, void *context __UNUSED__)
{
   return EINA_FALSE;
}

/* module advertising code */

/* Module entry point: inherit the generic software_8 engine function table,
 * register the log domain, then override the output-specific methods with
 * the eng_* implementations above. */
static int
module_open(Evas_Module * em)
{
   if (!em)
      return 0;
   /* get whatever engine module we inherit from */
   if (!_evas_module_engine_inherit(&pfunc, "software_8"))
      return 0;
   _evas_engine_soft8_x11_log_dom = eina_log_domain_register
       ("evas-software_8_x11", EVAS_DEFAULT_LOG_COLOR);
   if (_evas_engine_soft8_x11_log_dom < 0)
     {
        EINA_LOG_ERR("Can not create a module log domain.");
        return 0;
     }

   /* store it for later use */
   func = pfunc;
   /* now to override methods */
#define ORD(f) EVAS_API_OVERRIDE(f, &func, eng_)
   ORD(info);
   ORD(info_free);
   ORD(setup);
   ORD(canvas_alpha_get);
   ORD(output_free);
   ORD(output_resize);
   ORD(output_tile_size_set);
   ORD(output_redraws_rect_add);
   ORD(output_redraws_rect_del);
   ORD(output_redraws_clear);
   ORD(output_redraws_next_update_get);
   ORD(output_redraws_next_update_push);
   ORD(output_flush);
   ORD(output_idle_flush);
   /* now advertise out own api */
   em->functions = (void *)(&func);
   return 1;
}

/* Module exit point: release the log domain. */
static void
module_close(Evas_Module * em __UNUSED__)
{
   eina_log_domain_unregister(_evas_engine_soft8_x11_log_dom);
}

static Evas_Module_Api evas_modapi = {
   EVAS_MODULE_API_VERSION,
   "software_8_x11",
   "none",
   {
    module_open,
    module_close}
};

EVAS_MODULE_DEFINE(EVAS_MODULE_TYPE_ENGINE, engine, software_8_x11);

#ifndef EVAS_STATIC_BUILD_SOFTWARE_8_X11
EVAS_EINA_MODULE_DEFINE(engine, software_8_x11);
#endif
bsd-2-clause
jabenninghoff/homebrew-core
Formula/cosi.rb
3409
require "language/go"

# Homebrew formula for CoSi, a CLI tool implementing scalable collective
# signing. Built with the pre-Go-modules GOPATH workflow via Language::Go,
# so every Go dependency is pinned below to an exact upstream revision.
class Cosi < Formula
  desc "Implementation of scalable collective signing"
  homepage "https://github.com/dedis/cosi"
  url "https://github.com/dedis/cosi/archive/0.8.6.tar.gz"
  sha256 "007e4c4def13fcecf7301d86f177f098c583151c8a3d940ccb4c65a84413a9eb"
  license "AGPL-3.0"

  bottle do
    cellar :any_skip_relocation
    sha256 "30bbb457c0fb67ee264331e434068a4a747ece4cbc536cb75d289a06e93988e2" => :catalina
    sha256 "2ddd695441977b1cd435fbae28d9aa864d48b7a90ec24971348d91b5d0e551df" => :mojave
    sha256 "00663999a04ee29f52e334022cc828d7ebe89a442f1e713afb2167112f4ebf75" => :high_sierra
  end

  depends_on "go" => :build

  # Vendored Go dependencies, each pinned to the revision the 0.8.6
  # release was developed against.
  go_resource "github.com/BurntSushi/toml" do
    url "https://github.com/BurntSushi/toml.git",
        revision: "f0aeabca5a127c4078abb8c8d64298b147264b55"
  end

  go_resource "github.com/daviddengcn/go-colortext" do
    url "https://github.com/daviddengcn/go-colortext.git",
        revision: "511bcaf42ccd42c38aba7427b6673277bf19e2a1"
  end

  go_resource "github.com/dedis/crypto" do
    url "https://github.com/dedis/crypto.git",
        revision: "d9272cb478c0942e1d60049e6df219cba2067fcd"
  end

  go_resource "github.com/dedis/protobuf" do
    url "https://github.com/dedis/protobuf.git",
        revision: "6948fbd96a0f1e4e96582003261cf647dc66c831"
  end

  go_resource "github.com/montanaflynn/stats" do
    url "https://github.com/montanaflynn/stats.git",
        revision: "60dcacf48f43d6dd654d0ed94120ff5806c5ca5c"
  end

  go_resource "github.com/satori/go.uuid" do
    url "https://github.com/satori/go.uuid.git",
        revision: "f9ab0dce87d815821e221626b772e3475a0d2749"
  end

  go_resource "golang.org/x/net" do
    url "https://go.googlesource.com/net.git",
        revision: "0c607074acd38c5f23d1344dfe74c977464d1257"
  end

  go_resource "gopkg.in/codegangsta/cli.v1" do
    url "https://gopkg.in/codegangsta/cli.v1.git",
        revision: "01857ac33766ce0c93856370626f9799281c14f4"
  end

  go_resource "gopkg.in/dedis/cothority.v0" do
    url "https://gopkg.in/dedis/cothority.v0.git",
        revision: "e5eb384290e5fd98b8cb150a1348661aa2d49e2a"
  end

  def install
    # The project imports itself as github.com/dedis/cosi, so mirror that
    # layout inside the build GOPATH before staging dependencies.
    mkdir_p buildpath/"src/github.com/dedis"
    ln_s buildpath, buildpath/"src/github.com/dedis/cosi"
    ENV["GOPATH"] = "#{buildpath}/Godeps/_workspace:#{buildpath}"
    Language::Go.stage_deps resources, buildpath/"src"
    system "go", "build", "-o", "cosi"
    prefix.install "dedis_group.toml"
    bin.install "cosi"
  end

  test do
    # Spin up a single local cosi server, then check / sign / verify a file
    # against it. The key pair below is a throwaway fixture for the test.
    port = free_port
    (testpath/"config.toml").write <<~EOS
      Public = "7b6d6361686d0c76d9f4b40961736eb5d0849f7db3f8bfd8f869b8015d831d45"
      Private = "01a80f4fef21db2aea18e5288fe9aa71324a8ad202609139e5cfffc4ffdc4484"
      Addresses = ["0.0.0.0:#{port}"]
    EOS
    (testpath/"group.toml").write <<~EOS
      [[servers]]
      Addresses = ["127.0.0.1:#{port}"]
      Public = "e21jYWhtDHbZ9LQJYXNutdCEn32z+L/Y+Gm4AV2DHUU="
    EOS
    begin
      file = prefix/"README.md"
      sig = "README.sig"
      pid = fork { exec bin/"cosi", "server", "-config", "config.toml" }
      sleep 2  # give the server time to start listening
      assert_match "Success", shell_output("#{bin}/cosi check -g group.toml")
      system bin/"cosi", "sign", "-g", "group.toml", "-o", sig, file
      out = shell_output("#{bin}/cosi verify -g group.toml -s #{sig} #{file}")
      assert_match "OK", out
    ensure
      Process.kill("TERM", pid)
    end
  end
end
bsd-2-clause
d4l3k/motorcar
doc/latex/classmotorcar_1_1Pointer.tex
6837
\hypertarget{classmotorcar_1_1Pointer}{\section{motorcar\-:\-:Pointer Class Reference} \label{classmotorcar_1_1Pointer}\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}} } {\ttfamily \#include $<$pointer.\-h$>$} \subsection*{Public Member Functions} \begin{DoxyCompactItemize} \item \hyperlink{classmotorcar_1_1Pointer_abfbd5397af7d39289d7f3d7eb2db734d}{Pointer} (\hyperlink{classmotorcar_1_1Seat}{Seat} $\ast$seat) \item glm\-::vec2 \hyperlink{classmotorcar_1_1Pointer_a19f25225f16b8f4f2289e008bc75d957}{local\-Positon} () const \item void \hyperlink{classmotorcar_1_1Pointer_abe379b3238e6fe31b959b8affdc00ea2}{set\-Local\-Positon} (const glm\-::vec2 \&\hyperlink{classmotorcar_1_1Pointer_a19f25225f16b8f4f2289e008bc75d957}{local\-Positon}) \item \hyperlink{classmotorcar_1_1WaylandSurface}{Wayland\-Surface} $\ast$ \hyperlink{classmotorcar_1_1Pointer_a6b93402a87e64c673fef44d0102be63b}{focus} () const \item void \hyperlink{classmotorcar_1_1Pointer_afea4df13d7805e699c523c7aaaf30c4c}{set\-Focus} (\hyperlink{classmotorcar_1_1WaylandSurface}{Wayland\-Surface} $\ast$\hyperlink{classmotorcar_1_1Pointer_a6b93402a87e64c673fef44d0102be63b}{focus}) \item \hyperlink{classmotorcar_1_1WaylandSurfaceNode}{Wayland\-Surface\-Node} $\ast$ \hyperlink{classmotorcar_1_1Pointer_a97586b3be93be794761ca2b97864fc9e}{cursor\-Node} () const \item void \hyperlink{classmotorcar_1_1Pointer_a0bb8c2cfc6523155cc4b3f37487937fa}{set\-Cursor\-Node} (\hyperlink{classmotorcar_1_1WaylandSurfaceNode}{Wayland\-Surface\-Node} $\ast$\hyperlink{classmotorcar_1_1Pointer_a97586b3be93be794761ca2b97864fc9e}{cursor\-Node}) \item glm\-::ivec2 \hyperlink{classmotorcar_1_1Pointer_ae847516f0a34b7018bf0cdbd26a7d831}{cursor\-Hotspot} () const \item void \hyperlink{classmotorcar_1_1Pointer_ae8f489d37a9520e41d8f5162cff3992e}{set\-Cursor\-Hotspot} (const glm\-::ivec2 \&\hyperlink{classmotorcar_1_1Pointer_ae847516f0a34b7018bf0cdbd26a7d831}{cursor\-Hotspot}) \end{DoxyCompactItemize} \subsection{Constructor \& Destructor 
Documentation} \hypertarget{classmotorcar_1_1Pointer_abfbd5397af7d39289d7f3d7eb2db734d}{\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}!Pointer@{Pointer}} \index{Pointer@{Pointer}!motorcar::Pointer@{motorcar\-::\-Pointer}} \subsubsection[{Pointer}]{\setlength{\rightskip}{0pt plus 5cm}Pointer\-::\-Pointer ( \begin{DoxyParamCaption} \item[{{\bf Seat} $\ast$}]{seat} \end{DoxyParamCaption} )}}\label{classmotorcar_1_1Pointer_abfbd5397af7d39289d7f3d7eb2db734d} \subsection{Member Function Documentation} \hypertarget{classmotorcar_1_1Pointer_ae847516f0a34b7018bf0cdbd26a7d831}{\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}!cursor\-Hotspot@{cursor\-Hotspot}} \index{cursor\-Hotspot@{cursor\-Hotspot}!motorcar::Pointer@{motorcar\-::\-Pointer}} \subsubsection[{cursor\-Hotspot}]{\setlength{\rightskip}{0pt plus 5cm}glm\-::ivec2 Pointer\-::cursor\-Hotspot ( \begin{DoxyParamCaption} {} \end{DoxyParamCaption} ) const}}\label{classmotorcar_1_1Pointer_ae847516f0a34b7018bf0cdbd26a7d831} \hypertarget{classmotorcar_1_1Pointer_a97586b3be93be794761ca2b97864fc9e}{\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}!cursor\-Node@{cursor\-Node}} \index{cursor\-Node@{cursor\-Node}!motorcar::Pointer@{motorcar\-::\-Pointer}} \subsubsection[{cursor\-Node}]{\setlength{\rightskip}{0pt plus 5cm}{\bf Wayland\-Surface\-Node} $\ast$ Pointer\-::cursor\-Node ( \begin{DoxyParamCaption} {} \end{DoxyParamCaption} ) const}}\label{classmotorcar_1_1Pointer_a97586b3be93be794761ca2b97864fc9e} \hypertarget{classmotorcar_1_1Pointer_a6b93402a87e64c673fef44d0102be63b}{\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}!focus@{focus}} \index{focus@{focus}!motorcar::Pointer@{motorcar\-::\-Pointer}} \subsubsection[{focus}]{\setlength{\rightskip}{0pt plus 5cm}{\bf Wayland\-Surface} $\ast$ Pointer\-::focus ( \begin{DoxyParamCaption} {} \end{DoxyParamCaption} ) const}}\label{classmotorcar_1_1Pointer_a6b93402a87e64c673fef44d0102be63b} 
\hypertarget{classmotorcar_1_1Pointer_a19f25225f16b8f4f2289e008bc75d957}{\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}!local\-Positon@{local\-Positon}} \index{local\-Positon@{local\-Positon}!motorcar::Pointer@{motorcar\-::\-Pointer}} \subsubsection[{local\-Positon}]{\setlength{\rightskip}{0pt plus 5cm}glm\-::vec2 Pointer\-::local\-Positon ( \begin{DoxyParamCaption} {} \end{DoxyParamCaption} ) const}}\label{classmotorcar_1_1Pointer_a19f25225f16b8f4f2289e008bc75d957} \hypertarget{classmotorcar_1_1Pointer_ae8f489d37a9520e41d8f5162cff3992e}{\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}!set\-Cursor\-Hotspot@{set\-Cursor\-Hotspot}} \index{set\-Cursor\-Hotspot@{set\-Cursor\-Hotspot}!motorcar::Pointer@{motorcar\-::\-Pointer}} \subsubsection[{set\-Cursor\-Hotspot}]{\setlength{\rightskip}{0pt plus 5cm}void Pointer\-::set\-Cursor\-Hotspot ( \begin{DoxyParamCaption} \item[{const glm\-::ivec2 \&}]{cursor\-Hotspot} \end{DoxyParamCaption} )}}\label{classmotorcar_1_1Pointer_ae8f489d37a9520e41d8f5162cff3992e} \hypertarget{classmotorcar_1_1Pointer_a0bb8c2cfc6523155cc4b3f37487937fa}{\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}!set\-Cursor\-Node@{set\-Cursor\-Node}} \index{set\-Cursor\-Node@{set\-Cursor\-Node}!motorcar::Pointer@{motorcar\-::\-Pointer}} \subsubsection[{set\-Cursor\-Node}]{\setlength{\rightskip}{0pt plus 5cm}void Pointer\-::set\-Cursor\-Node ( \begin{DoxyParamCaption} \item[{{\bf Wayland\-Surface\-Node} $\ast$}]{cursor\-Node} \end{DoxyParamCaption} )}}\label{classmotorcar_1_1Pointer_a0bb8c2cfc6523155cc4b3f37487937fa} \hypertarget{classmotorcar_1_1Pointer_afea4df13d7805e699c523c7aaaf30c4c}{\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}!set\-Focus@{set\-Focus}} \index{set\-Focus@{set\-Focus}!motorcar::Pointer@{motorcar\-::\-Pointer}} \subsubsection[{set\-Focus}]{\setlength{\rightskip}{0pt plus 5cm}void Pointer\-::set\-Focus ( \begin{DoxyParamCaption} \item[{{\bf Wayland\-Surface} $\ast$}]{focus} \end{DoxyParamCaption} 
)}}\label{classmotorcar_1_1Pointer_afea4df13d7805e699c523c7aaaf30c4c} \hypertarget{classmotorcar_1_1Pointer_abe379b3238e6fe31b959b8affdc00ea2}{\index{motorcar\-::\-Pointer@{motorcar\-::\-Pointer}!set\-Local\-Positon@{set\-Local\-Positon}} \index{set\-Local\-Positon@{set\-Local\-Positon}!motorcar::Pointer@{motorcar\-::\-Pointer}} \subsubsection[{set\-Local\-Positon}]{\setlength{\rightskip}{0pt plus 5cm}void Pointer\-::set\-Local\-Positon ( \begin{DoxyParamCaption} \item[{const glm\-::vec2 \&}]{local\-Positon} \end{DoxyParamCaption} )}}\label{classmotorcar_1_1Pointer_abe379b3238e6fe31b959b8affdc00ea2} The documentation for this class was generated from the following files\-:\begin{DoxyCompactItemize} \item /home/dave/thesis/motorcar/src/compositor/wayland/input/\hyperlink{pointer_8h}{pointer.\-h}\item /home/dave/thesis/motorcar/src/compositor/wayland/input/\hyperlink{pointer_8cpp}{pointer.\-cpp}\end{DoxyCompactItemize}
bsd-2-clause
sscotth/homebrew-cask
Casks/boom.rb
1400
# Homebrew Cask for Boom 2, a system-wide volume booster / equalizer.
cask 'boom' do
  # Version is "app version,build timestamp"; the two halves feed the URL.
  version '1.6.9,1575451705'
  sha256 '444b5513c92eb0975494509908786a31f087a0af0e58fa5f312a156318be22f8'

  # devmate.com/com.globaldelight.Boom2/ was verified as official when first introduced to the cask
  url "https://dl.devmate.com/com.globaldelight.Boom2/#{version.before_comma}/#{version.after_comma}/Boom2-#{version.before_comma}.dmg"
  appcast 'https://updates.devmate.com/com.globaldelight.Boom2.xml'
  name 'Boom'
  homepage 'https://www.globaldelight.com/boom'

  depends_on macos: '>= :yosemite'

  app 'Boom 2.app'

  # Boom installs an audio kext and launchd jobs; remove them all and ask
  # the running app to terminate on uninstall.
  uninstall kext: 'com.globaldelight.driver.Boom2Device',
            launchctl: [
                         'com.globaldelight.Boom2.*',
                         'com.globaldelight.Boom2Daemon',
                       ],
            signal: ['TERM', 'com.globaldelight.Boom2']

  # Leftover per-user state removed by `brew cask zap`.
  zap trash: [
               '~/Library/Application Support/com.globaldelight.Boom2',
               '~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.globaldelight.boom2.sfl*',
               '~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.globaldelight.boom2daemon.sfl*',
               '~/Library/Preferences/com.globaldelight.Boom2.plist',
               '~/Library/Preferences/com.globaldelight.Boom2Daemon.plist',
             ]
end
bsd-2-clause
sebastienros/jint
Jint.Tests.Test262/test/language/expressions/equals/coerce-symbol-to-prim-invocation.js
1426
// Copyright (C) 2015 the V8 project authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- es6id: 12.10.3 description: Invocation of `Symbol.toPrimitive` method during coercion info: | [...] 7. Return the result of performing Abstract Equality Comparison rval == lval. ES6 Section 7.2.12 Abstract Equality Comparison [...] 10. If Type(x) is either String, Number, or Symbol and Type(y) is Object, then return the result of the comparison x == ToPrimitive(y). ES6 Section 7.1.1 ToPrimitive ( input [, PreferredType] ) 1. If PreferredType was not passed, let hint be "default". [...] 4. Let exoticToPrim be GetMethod(input, @@toPrimitive). 5. ReturnIfAbrupt(exoticToPrim). 6. If exoticToPrim is not undefined, then a. Let result be Call(exoticToPrim, input, «hint»). [...] features: [Symbol.toPrimitive] ---*/ var y = {}; var callCount = 0; var thisVal, args; y[Symbol.toPrimitive] = function() { callCount += 1; thisVal = this; args = arguments; }; 0 == y; assert.sameValue(callCount, 1, 'method invoked exactly once'); assert.sameValue(thisVal, y, '`this` value is the object being compared'); assert.sameValue(args.length, 1, 'method invoked with exactly one argument'); assert.sameValue( args[0], 'default', 'method invoked with the string "default" as the first argument' );
bsd-2-clause
pacav69/homebrew-cask
Casks/jetbrains-toolbox.rb
326
# Homebrew Cask for JetBrains Toolbox, the manager app for JetBrains IDEs.
cask 'jetbrains-toolbox' do
  version '1.0.1569'
  sha256 '5e47e404f7b9aa6e5d500eceb59801a9c1dc4da104e29fe1e392956188369b71'

  # The full version string is embedded in the artifact file name.
  url "https://download.jetbrains.com/toolbox/jetbrains-toolbox-#{version}.dmg"
  name 'JetBrains Toolbox'
  homepage 'https://www.jetbrains.com/'
  # NOTE(review): the `license` stanza is a legacy Homebrew-Cask DSL feature
  # that was later removed — confirm it is still accepted by the target
  # Homebrew version before updating this cask.
  license :gratis

  app 'JetBrains Toolbox.app'
end
bsd-2-clause
Pushjet/Pushjet-Android
gradle/wrapper/dists/gradle-2.2.1-all/c64ydeuardnfqctvr1gm30w53/gradle-2.2.1/src/wrapper/org/gradle/wrapper/Install.java
7302
/*
 * Copyright 2010 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.wrapper;

import java.io.*;
import java.net.URI;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

/**
 * Downloads a Gradle distribution zip (if not already cached), unpacks it into
 * the local distribution directory, and marks the install as complete with a
 * {@code .ok} marker file. All work on a given zip is serialized across
 * processes via {@link ExclusiveFileAccessManager}.
 */
public class Install {
    public static final String DEFAULT_DISTRIBUTION_PATH = "wrapper/dists";

    private final IDownload download;
    private final PathAssembler pathAssembler;
    // 120s total wait, polling every 200ms, for the cross-process lock on the zip.
    private final ExclusiveFileAccessManager exclusiveFileAccessManager = new ExclusiveFileAccessManager(120000, 200);

    public Install(IDownload download, PathAssembler pathAssembler) {
        this.download = download;
        this.pathAssembler = pathAssembler;
    }

    /**
     * Returns the root directory of the installed distribution, downloading and
     * unpacking it first if the marker file is absent.
     *
     * @param configuration the wrapper configuration naming the distribution URI
     * @return the single top-level directory of the unpacked distribution
     * @throws Exception if the download, rename, or unzip fails, or if the
     *         distribution does not contain exactly one top-level directory
     */
    public File createDist(WrapperConfiguration configuration) throws Exception {
        final URI distributionUrl = configuration.getDistribution();
        final PathAssembler.LocalDistribution localDistribution = pathAssembler.getDistribution(configuration);
        final File distDir = localDistribution.getDistributionDir();
        final File localZipFile = localDistribution.getZipFile();

        return exclusiveFileAccessManager.access(localZipFile, new Callable<File>() {
            public File call() throws Exception {
                // The ".ok" marker is created only after a fully successful install,
                // so its presence means the distribution dir is usable as-is.
                final File markerFile = new File(localZipFile.getParentFile(), localZipFile.getName() + ".ok");
                if (distDir.isDirectory() && markerFile.isFile()) {
                    return getDistributionRoot(distDir, distDir.getAbsolutePath());
                }

                boolean needsDownload = !localZipFile.isFile();
                if (needsDownload) {
                    // Download to a ".part" file first so an interrupted transfer
                    // never leaves a truncated file under the final zip name.
                    File tmpZipFile = new File(localZipFile.getParentFile(), localZipFile.getName() + ".part");
                    tmpZipFile.delete();
                    System.out.println("Downloading " + distributionUrl);
                    download.download(distributionUrl, tmpZipFile);
                    // renameTo() reports failure via its return value; ignoring it
                    // would let us proceed with a missing or stale zip.
                    if (!tmpZipFile.renameTo(localZipFile)) {
                        throw new RuntimeException(String.format(
                                "Could not rename downloaded distribution '%s' to '%s'.",
                                tmpZipFile.getAbsolutePath(), localZipFile.getAbsolutePath()));
                    }
                }

                // Remove any partially-unpacked leftovers from a previous attempt.
                List<File> topLevelDirs = listDirs(distDir);
                for (File dir : topLevelDirs) {
                    System.out.println("Deleting directory " + dir.getAbsolutePath());
                    deleteDir(dir);
                }

                System.out.println("Unzipping " + localZipFile.getAbsolutePath() + " to " + distDir.getAbsolutePath());
                unzip(localZipFile, distDir);

                File root = getDistributionRoot(distDir, distributionUrl.toString());
                setExecutablePermissions(root);
                markerFile.createNewFile();
                return root;
            }
        });
    }

    /**
     * Returns the single top-level directory inside {@code distDir}, failing if
     * there are zero or more than one (a valid distribution zip has exactly one).
     */
    private File getDistributionRoot(File distDir, String distributionDescription) {
        List<File> dirs = listDirs(distDir);
        if (dirs.isEmpty()) {
            throw new RuntimeException(String.format("Gradle distribution '%s' does not contain any directories. Expected to find exactly 1 directory.", distributionDescription));
        }
        if (dirs.size() != 1) {
            throw new RuntimeException(String.format("Gradle distribution '%s' contains too many directories. Expected to find exactly 1 directory.", distributionDescription));
        }
        return dirs.get(0);
    }

    /** Lists the immediate subdirectories of {@code distDir} (empty if it does not exist). */
    private List<File> listDirs(File distDir) {
        List<File> dirs = new ArrayList<File>();
        if (distDir.exists()) {
            File[] files = distDir.listFiles();
            if (files != null) {
                for (File file : files) {
                    if (file.isDirectory()) {
                        dirs.add(file);
                    }
                }
            }
        }
        return dirs;
    }

    /**
     * Best-effort {@code chmod 755} of {@code bin/gradle} on non-Windows hosts.
     * Failure is reported to the user but never aborts the install.
     */
    private void setExecutablePermissions(File gradleHome) {
        if (isWindows()) {
            return;
        }
        File gradleCommand = new File(gradleHome, "bin/gradle");
        String errorMessage = null;
        try {
            ProcessBuilder pb = new ProcessBuilder("chmod", "755", gradleCommand.getCanonicalPath());
            // chmod writes its diagnostics to stderr; merge it into stdout so the
            // single stream we read below actually contains the failure reason.
            pb.redirectErrorStream(true);
            Process p = pb.start();
            if (p.waitFor() == 0) {
                System.out.println("Set executable permissions for: " + gradleCommand.getAbsolutePath());
            } else {
                BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));
                try {
                    StringBuilder output = new StringBuilder();
                    String line;
                    while ((line = reader.readLine()) != null) {
                        output.append(line).append(String.format("%n"));
                    }
                    errorMessage = output.toString();
                } finally {
                    reader.close();
                }
            }
        } catch (IOException e) {
            errorMessage = e.getMessage();
        } catch (InterruptedException e) {
            // Preserve the interrupt for callers further up the stack.
            Thread.currentThread().interrupt();
            errorMessage = e.getMessage();
        }
        if (errorMessage != null) {
            System.out.println("Could not set executable permissions for: " + gradleCommand.getAbsolutePath());
            System.out.println("Please do this manually if you want to use the Gradle UI.");
        }
    }

    /** Detects Windows from the {@code os.name} system property (locale-independent lowering). */
    private boolean isWindows() {
        String osName = System.getProperty("os.name").toLowerCase(Locale.US);
        return osName.indexOf("windows") > -1;
    }

    /**
     * Recursively deletes {@code dir}.
     *
     * @return true if the file or directory tree was fully deleted
     */
    private boolean deleteDir(File dir) {
        if (dir.isDirectory()) {
            String[] children = dir.list();
            // list() returns null on I/O error; treat that as a failed delete
            // rather than throwing NullPointerException.
            if (children == null) {
                return false;
            }
            for (int i = 0; i < children.length; i++) {
                boolean success = deleteDir(new File(dir, children[i]));
                if (!success) {
                    return false;
                }
            }
        }
        // The directory is now empty so delete it
        return dir.delete();
    }

    /**
     * Unpacks {@code zip} into {@code dest}, rejecting entries whose resolved
     * path escapes {@code dest} ("zip slip" — the archive comes from a network
     * download and its entry names are untrusted input).
     */
    private void unzip(File zip, File dest) throws IOException {
        ZipFile zipFile = new ZipFile(zip);
        try {
            String canonicalDestPath = dest.getCanonicalPath() + File.separator;
            Enumeration<? extends ZipEntry> entries = zipFile.entries();
            while (entries.hasMoreElements()) {
                ZipEntry entry = entries.nextElement();
                File entryFile = new File(dest, entry.getName());
                if (!entryFile.getCanonicalPath().startsWith(canonicalDestPath)) {
                    throw new IOException(String.format(
                            "Zip entry '%s' would be written outside of '%s'.",
                            entry.getName(), dest.getAbsolutePath()));
                }
                if (entry.isDirectory()) {
                    entryFile.mkdirs();
                    continue;
                }
                // Some archives omit explicit directory entries; make sure the
                // parent directory exists before opening the output stream.
                File parent = entryFile.getParentFile();
                if (parent != null) {
                    parent.mkdirs();
                }
                OutputStream outputStream = new BufferedOutputStream(new FileOutputStream(entryFile));
                try {
                    copyInputStream(zipFile.getInputStream(entry), outputStream);
                } finally {
                    outputStream.close();
                }
            }
        } finally {
            zipFile.close();
        }
    }

    /** Copies {@code in} to {@code out}, closing both streams (input even on failure). */
    private void copyInputStream(InputStream in, OutputStream out) throws IOException {
        try {
            byte[] buffer = new byte[8192];
            int len;
            while ((len = in.read(buffer)) >= 0) {
                out.write(buffer, 0, len);
            }
        } finally {
            in.close();
        }
        out.close();
    }
}
bsd-2-clause
jordemort/evas
src/modules/engines/gl_common/shader/yuv_frag_s3c6410.asm
1354
#-------------------------------------------------
# ORION - OpenGL ES 2.0 Shading Language Compiler
# SAMSUNG INDIA SOFTWARE OPERATIONS PVT. LTD.
# Compiler Version : v04.00.09
# Release Date : 19.01.2009
# FIMG VERSION : FIMGv1.5
# Optimizer Options : -O --nolodcalc
#-------------------------------------------------
# hand optimised - removed useless ops
#
# Planar YUV -> RGB fragment shader: samples Y, U and V from three separate
# textures (s0/s1/s2, each via its own texcoord), converts to RGB, then
# modulates by the per-vertex colour v3.
#
# The c2..c5 constants are the BT.601 YUV->RGB matrix with a Y offset of
# 16/255 and chroma centred at 0.5 (c5 holds the combined constant offsets:
# e.g. -0.7738 = -(1.164*16/255 + 1.402*0.5)); presumably "video range"
# input — confirm against the encoder feeding these textures.
ps_3_0
fimg_version 0x01020000
# Samplers: s0 = Y plane, s1 = U plane, s2 = V plane.
dcl_s2_tex s0
dcl_s2_texu s1
dcl_s2_texv s2
# Varyings: v3 = vertex colour, v0/v1/v2 = texcoords for the three planes.
dcl_f4_col v3.x
dcl_f2_tex_c v0.x
dcl_f2_tex_c2 v1.x
dcl_f2_tex_c3 v2.x
# Per-channel Y gain (1.164 applied to R, G and B alike).
def c2, 1.164000, 1.164000, 1.164000, 0.000000
# U (Cb) contribution column: G -0.3441, B +1.772.
def c3, 0.000000, -0.344100, 1.772000, 0.000000
# V (Cr) contribution column: R +1.402, G -0.7141.
def c4, 1.402000, -0.714100, 0.000000, 0.000000
# Constant offsets folded from the Y/U/V bias terms; alpha forced to 1.
def c5, -0.773800, 0.456300, -0.958800, 1.000000
# (1,0,0,0) helper constant, used both to build .xy00 texcoords and as the
# scalar 1.0 multiplier in the final mad.
def c6, 1.000000, 0.000000, 0.000000, 0.000000
label start
label main_
# Fetch the three plane samples; the mul packs each 2D texcoord into .xy00.
mul r0.xyzw, c6.xxyy, v0.xyyy # tex_c=v0.xyyy
texld r0.xyzw, r0.xyzw, s0 # tex=s0
mul r1.xyzw, c6.xxyy, v1.xyyy # tex_c2=v1.xyyy
texld r1.xyzw, r1.xyzw, s1 # texu=s1
mul r2.xyzw, c6.xxyy, v2.xyyy # tex_c3=v2.xyyy
texld r2.xyzw, r2.xyzw, s2 # texv=s2
# rgb = Y*c2 + U*c3 + V*c4 + c5  (matrix columns accumulated in r3).
mul r3.xyzw, c2.xyzw, r0.xxxx # yuv=r0.xxxx
mad r3.xyzw, c3.xyzw, r1.xxxx, r3.xyzw # yuv=r1.xxxx
mad r3.xyzw, c4.xyzw, r2.xxxx, r3.xyzw # yuv=r2.xxxx
mov r4.xyzw, c5.xyzw
mad r3.xyzw, r4.xyzw, c6.xxxx, r3.xyzw # yuv=c6.xxxx
# Modulate by vertex colour, saturating to [0,1].
mul_sat oColor.xyzw, r3.xyzw, v3.xyzw # gl_FragColor=oColor.xyzw, col=v3.xyzw
label main_end
ret
bsd-2-clause