text
stringlengths 2
1.04M
| meta
dict |
---|---|
import typing
from ... import exc
from ... import util
from ...sql import coercions
from ...sql import elements
from ...sql import operators
from ...sql import roles
from ...sql.base import _generative
from ...sql.base import Generative
Selfmatch = typing.TypeVar("Selfmatch", bound="match")
class match(Generative, elements.BinaryExpression):
"""Produce a ``MATCH (X, Y) AGAINST ('TEXT')`` clause.
E.g.::
from sqlalchemy import desc
from sqlalchemy.dialects.mysql import match
match_expr = match(
users_table.c.firstname,
users_table.c.lastname,
against="Firstname Lastname",
)
stmt = (
select(users_table)
.where(match_expr.in_boolean_mode())
.order_by(desc(match_expr))
)
Would produce SQL resembling::
SELECT id, firstname, lastname
FROM user
WHERE MATCH(firstname, lastname) AGAINST (:param_1 IN BOOLEAN MODE)
ORDER BY MATCH(firstname, lastname) AGAINST (:param_2) DESC
The :func:`_mysql.match` function is a standalone version of the
:meth:`_sql.ColumnElement.match` method available on all
SQL expressions, as when :meth:`_expression.ColumnElement.match` is
used, but allows to pass multiple columns
:param cols: column expressions to match against
:param against: expression to be compared towards
:param in_boolean_mode: boolean, set "boolean mode" to true
:param in_natural_language_mode: boolean , set "natural language" to true
:param with_query_expansion: boolean, set "query expansion" to true
.. versionadded:: 1.4.19
.. seealso::
:meth:`_expression.ColumnElement.match`
"""
__visit_name__ = "mysql_match"
inherit_cache = True
def __init__(self, *cols, **kw):
if not cols:
raise exc.ArgumentError("columns are required")
against = kw.pop("against", None)
if against is None:
raise exc.ArgumentError("against is required")
against = coercions.expect(
roles.ExpressionElementRole,
against,
)
left = elements.BooleanClauseList._construct_raw(
operators.comma_op,
clauses=cols,
)
left.group = False
flags = util.immutabledict(
{
"mysql_boolean_mode": kw.pop("in_boolean_mode", False),
"mysql_natural_language": kw.pop(
"in_natural_language_mode", False
),
"mysql_query_expansion": kw.pop("with_query_expansion", False),
}
)
if kw:
raise exc.ArgumentError("unknown arguments: %s" % (", ".join(kw)))
super().__init__(left, against, operators.match_op, modifiers=flags)
@_generative
def in_boolean_mode(self: Selfmatch) -> Selfmatch:
"""Apply the "IN BOOLEAN MODE" modifier to the MATCH expression.
:return: a new :class:`_mysql.match` instance with modifications
applied.
"""
self.modifiers = self.modifiers.union({"mysql_boolean_mode": True})
return self
@_generative
def in_natural_language_mode(self: Selfmatch) -> Selfmatch:
"""Apply the "IN NATURAL LANGUAGE MODE" modifier to the MATCH
expression.
:return: a new :class:`_mysql.match` instance with modifications
applied.
"""
self.modifiers = self.modifiers.union({"mysql_natural_language": True})
return self
@_generative
def with_query_expansion(self: Selfmatch) -> Selfmatch:
"""Apply the "WITH QUERY EXPANSION" modifier to the MATCH expression.
:return: a new :class:`_mysql.match` instance with modifications
applied.
"""
self.modifiers = self.modifiers.union({"mysql_query_expansion": True})
return self
| {
"content_hash": "454cdafb4f0031b90dcd00d02e8bd09b",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 79,
"avg_line_length": 28.764705882352942,
"alnum_prop": 0.6083844580777096,
"repo_name": "zzzeek/sqlalchemy",
"id": "561803a78d8bdf98ddd37a17e3f8b925287c5303",
"size": "4154",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "lib/sqlalchemy/dialects/mysql/expression.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cython",
"bytes": "21698"
},
{
"name": "Python",
"bytes": "16838583"
}
],
"symlink_target": ""
} |
/* *
*
* (c) 2009-2020 Øystein Moseng
*
 *  Place descriptions on a series and its points.
*
* License: www.highcharts.com/license
*
* !!!!!!! SOURCE GETS TRANSPILED BY TYPESCRIPT. EDIT TS FILE ONLY. !!!!!!!
*
* */
'use strict';
import H from '../../../../parts/Globals.js';
var numberFormat = H.numberFormat, format = H.format;
import U from '../../../../parts/Utilities.js';
var find = U.find, isNumber = U.isNumber, pick = U.pick, defined = U.defined;
import AnnotationsA11y from '../AnnotationsA11y.js';
var getPointAnnotationTexts = AnnotationsA11y.getPointAnnotationTexts;
import HTMLUtilities from '../../utils/htmlUtilities.js';
var escapeStringForHTML = HTMLUtilities.escapeStringForHTML, reverseChildNodes = HTMLUtilities.reverseChildNodes, stripHTMLTags = HTMLUtilities.stripHTMLTagsFromString;
import ChartUtilities from '../../utils/chartUtilities.js';
var getAxisDescription = ChartUtilities.getAxisDescription, getSeriesFirstPointElement = ChartUtilities.getSeriesFirstPointElement, getSeriesA11yElement = ChartUtilities.getSeriesA11yElement, unhideChartElementFromAT = ChartUtilities.unhideChartElementFromAT;
import Tooltip from '../../../../parts/Tooltip.js';
/* eslint-disable valid-jsdoc */
/**
 * Find the first point after the given point (by index) in the same series
 * that already has a rendered graphic.
 * @private
 * @param {Highcharts.Point} point
 * @return {Highcharts.Point|null}
 */
function findFirstPointWithGraphic(point) {
    var startIndex = point.index;
    if (!point.series || !point.series.data || !defined(startIndex)) {
        return null;
    }
    var hasLaterGraphic = function (p) {
        return !!(p &&
            typeof p.index !== 'undefined' &&
            p.index > startIndex &&
            p.graphic &&
            p.graphic.element);
    };
    return find(point.series.data, hasLaterGraphic) || null;
}
/**
 * Decide whether an invisible stand-in ("dummy") element is needed for a
 * point so that a11y attributes can be attached to it.
 * @private
 * @param {Highcharts.Point} point
 * @return {boolean}
 */
function shouldAddDummyPoint(point) {
    // Sunburst series flag hidden drilldown points with isNull; those must
    // not receive a dummy element.
    var inSunburstSeries = point.series && point.series.is('sunburst');
    return point.isNull && !inSunburstSeries;
}
/**
 * Render a 1x1, fully transparent rect at the given position, used as a
 * stand-in element for null points.
 * @private
 * @param {Highcharts.Point} point
 * @param {Highcharts.PositionObject} pos
 * @return {Highcharts.SVGElement}
 */
function makeDummyElement(point, pos) {
    var renderer = point.series.chart.renderer;
    var rect = renderer.rect(pos.x, pos.y, 1, 1);
    rect.attr({
        'class': 'highcharts-a11y-dummy-point',
        fill: 'none',
        opacity: 0,
        'fill-opacity': 0,
        'stroke-opacity': 0
    });
    return rect;
}
/**
 * Create and insert an invisible dummy DOM element for a point, so that
 * screen reader / keyboard attributes have a node to live on. The element is
 * attached as point.graphic and point.hasDummyGraphic is set.
 * @private
 * @param {Highcharts.Point} point
 * @return {Highcharts.HTMLDOMElement|Highcharts.SVGDOMElement|undefined}
 * The dummy element, or undefined if no parent group could be found.
 */
function addDummyPointElement(point) {
    // Prefer the parent group of the next point that has a graphic, so the
    // dummy lands in the same container; fall back to the series graph/group.
    var series = point.series, firstPointWithGraphic = findFirstPointWithGraphic(point), firstGraphic = firstPointWithGraphic && firstPointWithGraphic.graphic, parentGroup = firstGraphic ?
        firstGraphic.parentGroup :
        series.graph || series.group, dummyPos = firstPointWithGraphic ? {
        x: pick(point.plotX, firstPointWithGraphic.plotX, 0),
        y: pick(point.plotY, firstPointWithGraphic.plotY, 0)
    } : {
        x: pick(point.plotX, 0),
        y: pick(point.plotY, 0)
    }, dummyElement = makeDummyElement(point, dummyPos);
    if (parentGroup && parentGroup.element) {
        point.graphic = dummyElement;
        point.hasDummyGraphic = true;
        dummyElement.add(parentGroup);
        // Move to correct pos in DOM: before the reference point's graphic,
        // or appended at the end (insertBefore with null) if there is none.
        parentGroup.element.insertBefore(dummyElement.element, firstGraphic ? firstGraphic.element : null);
        return dummyElement.element;
    }
    // Implicitly returns undefined when no usable parent group exists.
}
/**
 * Whether the series has at least as many points as the configured
 * pointDescriptionEnabledThreshold. A threshold of false disables it.
 * @private
 * @param {Highcharts.Series} series
 * @return {boolean}
 */
function hasMorePointsThanDescriptionThreshold(series) {
    var a11ySeriesOptions = series.chart.options.accessibility.series;
    var threshold = a11ySeriesOptions.pointDescriptionEnabledThreshold;
    if (threshold === false || !series.points) {
        return false;
    }
    return series.points.length >= threshold;
}
/**
 * Whether individual points of this series should get screen reader
 * attributes (role/aria-label).
 * @private
 * @param {Highcharts.Series} series
 * @return {boolean}
 */
function shouldSetScreenReaderPropsOnPoints(series) {
    var seriesA11yOptions = series.options.accessibility || {};
    if (seriesA11yOptions.exposeAsGroupOnly) {
        // The series is exposed as a single unit; skip per-point props.
        return false;
    }
    return !hasMorePointsThanDescriptionThreshold(series);
}
/**
 * Whether individual points should receive keyboard navigation props
 * (tabindex), based on pointNavigationEnabledThreshold.
 * @private
 * @param {Highcharts.Series} series
 * @return {boolean}
 */
function shouldSetKeyboardNavPropsOnPoints(series) {
    var navOptions = series.chart.options.accessibility
        .keyboardNavigation.seriesNavigation;
    var threshold = navOptions.pointNavigationEnabledThreshold;
    if (!series.points) {
        return false;
    }
    // A threshold of false means "never disable".
    return !!(threshold === false || series.points.length < threshold);
}
/**
 * Whether the series container element should get an aria-label description.
 * @private
 * @param {Highcharts.Series} series
 * @return {boolean}
 */
function shouldDescribeSeriesElement(series) {
    var chart = series.chart;
    var chartOptions = chart.options.chart || {};
    var has3d = chartOptions.options3d && chartOptions.options3d.enabled;
    var multipleSeries = chart.series.length > 1;
    // Multi-series 3D charts are not described.
    if (has3d && multipleSeries) {
        return false;
    }
    var a11ySeriesOptions = chart.options.accessibility.series;
    var seriesA11yOptions = series.options.accessibility || {};
    return (multipleSeries ||
        a11ySeriesOptions.describeSingleSeries ||
        seriesA11yOptions.exposeAsGroupOnly ||
        hasMorePointsThanDescriptionThreshold(series));
}
/**
 * Format a numeric point value as a localized string; non-numbers are
 * returned unchanged.
 * @private
 * @param {Highcharts.Point} point
 * @param {number} value
 * @return {string}
 */
function pointNumberToString(point, value) {
    if (!isNumber(value)) {
        return value;
    }
    var chart = point.series.chart;
    var lang = chart.options.lang;
    var a11yPointOptions = chart.options.accessibility.point || {};
    var tooltipOptions = point.series.tooltipOptions || {};
    // -1 decimals means "as many as needed".
    var decimals = a11yPointOptions.valueDecimals ||
        tooltipOptions.valueDecimals || -1;
    var thousandsSep = lang.accessibility.thousandsSep ||
        lang.thousandsSep;
    return numberFormat(value, decimals, lang.decimalPoint, thousandsSep);
}
/**
 * Return the user-configured series description run through the lang format,
 * or an empty string if no description is set.
 * @private
 * @param {Highcharts.Series} series
 * @return {string}
 */
function getSeriesDescriptionText(series) {
    var a11yOptions = series.options.accessibility || {};
    var description = a11yOptions.description;
    if (!description) {
        return '';
    }
    return series.chart.langFormat('accessibility.series.description', {
        description: description,
        series: series
    }) || '';
}
/**
 * Return the lang-formatted description for one of the series' axes.
 * @private
 * @param {Highcharts.Series} series
 * @param {string} axisCollection Either 'xAxis' or 'yAxis'.
 * @return {string}
 */
function getSeriesAxisDescriptionText(series, axisCollection) {
    var axis = series[axisCollection];
    var langKey = 'accessibility.series.' + axisCollection + 'Description';
    return series.chart.langFormat(langKey, {
        name: getAxisDescription(axis),
        series: series
    });
}
/**
 * Get accessible time description for a point on a datetime axis.
 *
 * @private
 * @function Highcharts.Point#getTimeDescription
 * @param {Highcharts.Point} point
 * @return {string|undefined}
 * The description as string, or undefined for non-datetime x axes.
 */
function getPointA11yTimeDescription(point) {
    var series = point.series, chart = series.chart, a11yOptions = chart.options.accessibility.point || {}, hasDateXAxis = series.xAxis && series.xAxis.isDatetimeAxis;
    if (hasDateXAxis) {
        // Reuse the tooltip's date-format resolution by invoking its
        // prototype method on a minimal stand-in context object.
        var tooltipDateFormat = Tooltip.prototype.getXDateFormat.call({
            getDateFormat: Tooltip.prototype.getDateFormat,
            chart: chart
        }, point, chart.options.tooltip, series.xAxis), dateFormat = a11yOptions.dateFormatter &&
            a11yOptions.dateFormatter(point) ||
            a11yOptions.dateFormat ||
            tooltipDateFormat;
        // void 0 means no capitalization override here -- see time.dateFormat.
        return chart.time.dateFormat(dateFormat, point.x, void 0);
    }
    // Implicitly returns undefined otherwise.
}
/**
 * Describe the x value of a point: name, then time description, then
 * category, then id, then a plain "x, <value>" fallback.
 * @private
 * @param {Highcharts.Point} point
 * @return {string}
 */
function getPointXDescription(point) {
    // NOTE(review): String#replace with a string pattern only replaces the
    // first '<br/>' occurrence in the category -- confirm that multi-break
    // categories are acceptable here.
    // Auto-generated ids start with 'highcharts-' and are not meaningful to
    // users, hence the indexOf check.
    var timeDesc = getPointA11yTimeDescription(point), xAxis = point.series.xAxis || {}, pointCategory = xAxis.categories && defined(point.category) &&
        ('' + point.category).replace('<br/>', ' '), canUseId = point.id && point.id.indexOf('highcharts-') < 0, fallback = 'x, ' + point.x;
    return point.name || timeDesc || pointCategory ||
        (canUseId ? point.id : fallback);
}
/**
 * Build a "key: value" list for every key in the series' pointArrayMap,
 * with the given prefix/suffix around each value.
 * @private
 * @param {Highcharts.Point} point
 * @param {string} prefix
 * @param {string} suffix
 * @return {string}
 */
function getPointArrayMapValueDescription(point, prefix, suffix) {
    var pre = prefix || '';
    var suf = suffix || '';
    var describeKey = function (key) {
        var num = pointNumberToString(point, pick(point[key], point.options[key]));
        return key + ': ' + pre + num + suf;
    };
    var parts = [];
    point.series.pointArrayMap.forEach(function (key) {
        parts.push(describeKey(key));
    });
    return parts.join(', ');
}
/**
 * Describe the value(s) of a point: a null-point message, a pointArrayMap
 * breakdown, or the plain value/y with prefix and suffix applied.
 * @private
 * @param {Highcharts.Point} point
 * @return {string}
 */
function getPointValue(point) {
    var series = point.series;
    var chart = series.chart;
    if (point.isNull) {
        return chart.langFormat('accessibility.series.nullPointValue', {
            point: point
        });
    }
    var a11yPointOpts = chart.options.accessibility.point || {};
    var tooltipOptions = series.tooltipOptions || {};
    var valuePrefix = a11yPointOpts.valuePrefix ||
        tooltipOptions.valuePrefix || '';
    var valueSuffix = a11yPointOpts.valueSuffix ||
        tooltipOptions.valueSuffix || '';
    if (series.pointArrayMap) {
        return getPointArrayMapValueDescription(point, valuePrefix, valueSuffix);
    }
    // Some series types store the primary value on .value rather than .y.
    var valueKey = typeof point.value !== 'undefined' ? 'value' : 'y';
    return valuePrefix + pointNumberToString(point, point[valueKey]) + valueSuffix;
}
/**
 * Return the description for the annotation(s) connected to a point, or empty
 * string if none.
 *
 * @private
 * @param {Highcharts.Point} point The data point to get the annotation info from.
 * @return {string} Annotation description
 */
function getPointAnnotationDescription(point) {
    var annotations = getPointAnnotationTexts(point);
    if (!annotations.length) {
        return '';
    }
    var langKey = 'accessibility.series.pointAnnotationsDescription';
    return point.series.chart.langFormat(langKey, {
        point: point,
        annotations: annotations
    });
}
/**
 * Return string with information about point, built from the configurable
 * valueDescriptionFormat template.
 * @private
 * @param {Highcharts.Point} point
 * @return {string}
 */
function getPointValueDescription(point) {
    // The x description is skipped for angular (gauge) charts unless the
    // x axis a11y option explicitly enables it.
    var series = point.series, chart = series.chart, pointValueDescriptionFormat = chart.options.accessibility
        .point.valueDescriptionFormat, showXDescription = pick(series.xAxis &&
        series.xAxis.options.accessibility &&
        series.xAxis.options.accessibility.enabled, !chart.angular), xDesc = showXDescription ? getPointXDescription(point) : '', context = {
        point: point,
        // 1-based index for human consumption; empty when undefined.
        index: defined(point.index) ? (point.index + 1) : '',
        xDescription: xDesc,
        value: getPointValue(point),
        separator: showXDescription ? ', ' : ''
    };
    return format(pointValueDescriptionFormat, context, chart);
}
/**
 * Return string with information about point: its value description,
 * user-supplied description, series name (multi-series charts only), and
 * annotation descriptions. Also caches the value description on the point.
 * @private
 * @param {Highcharts.Point} point
 * @return {string}
 */
function defaultPointDescriptionFormatter(point) {
    var series = point.series;
    var chart = series.chart;
    var valueText = getPointValueDescription(point);
    var pointOptions = point.options;
    var description = pointOptions && pointOptions.accessibility &&
        pointOptions.accessibility.description;
    var userText = description ? ' ' + description : '';
    var seriesNameText = '';
    if (chart.series.length > 1 && series.name) {
        seriesNameText = ' ' + series.name + '.';
    }
    var annotationsDesc = getPointAnnotationDescription(point);
    var annotationsText = annotationsDesc ? ' ' + annotationsDesc : '';
    // Cache the value description for later consumers.
    point.accessibility = point.accessibility || {};
    point.accessibility.valueDescription = valueText;
    return valueText + userText + seriesNameText + annotationsText;
}
/**
 * Set a11y props on a point element: role="img" plus an aria-label built
 * from the first available formatter (series-level, then point-level, then
 * the default), stripped of HTML and escaped.
 * @private
 * @param {Highcharts.Point} point
 * @param {Highcharts.HTMLDOMElement|Highcharts.SVGDOMElement} pointElement
 */
function setPointScreenReaderAttribs(point, pointElement) {
    // Note: stripHTMLTags runs before escapeStringForHTML so that literal
    // markup from formatters is removed rather than double-escaped.
    var series = point.series, a11yPointOptions = series.chart.options.accessibility.point || {}, seriesA11yOptions = series.options.accessibility || {}, label = escapeStringForHTML(stripHTMLTags(seriesA11yOptions.pointDescriptionFormatter &&
        seriesA11yOptions.pointDescriptionFormatter(point) ||
        a11yPointOptions.descriptionFormatter &&
        a11yPointOptions.descriptionFormatter(point) ||
        defaultPointDescriptionFormatter(point)));
    pointElement.setAttribute('role', 'img');
    pointElement.setAttribute('aria-label', label);
}
/**
 * Add accessible info to individual point elements of a series. Points get a
 * tabindex whenever any props are set; screen reader attributes or
 * aria-hidden depending on configuration.
 * @private
 * @param {Highcharts.Series} series
 */
function describePointsInSeries(series) {
    var setScreenReaderProps = shouldSetScreenReaderPropsOnPoints(series);
    var setKeyboardProps = shouldSetKeyboardNavPropsOnPoints(series);
    if (!setScreenReaderProps && !setKeyboardProps) {
        return;
    }
    series.points.forEach(function (point) {
        var pointEl = point.graphic && point.graphic.element ||
            shouldAddDummyPoint(point) && addDummyPointElement(point);
        if (!pointEl) {
            return;
        }
        // We always set tabindex, as long as we are setting props at all.
        pointEl.setAttribute('tabindex', '-1');
        if (setScreenReaderProps) {
            setPointScreenReaderAttribs(point, pointEl);
        }
        else {
            pointEl.setAttribute('aria-hidden', true);
        }
    });
}
/**
 * Return string with information about series: a type-specific (or default)
 * summary, the configured description, and axis descriptions for charts with
 * multiple axes of a kind.
 * @private
 * @param {Highcharts.Series} series
 * @return {string}
 */
function defaultSeriesDescriptionFormatter(series) {
    // Lang key is assembled as accessibility.series.summary.<type>[Combination],
    // falling back to ...summary.default[Combination] when the type-specific
    // key yields nothing.
    var chart = series.chart, chartTypes = chart.types || [], description = getSeriesDescriptionText(series), shouldDescribeAxis = function (coll) {
        // Only mention an axis when the chart has several of that kind and
        // the series is bound to one.
        return chart[coll] && chart[coll].length > 1 && series[coll];
    }, xAxisInfo = getSeriesAxisDescriptionText(series, 'xAxis'), yAxisInfo = getSeriesAxisDescriptionText(series, 'yAxis'), summaryContext = {
        name: series.name || '',
        // 1-based position for human consumption.
        ix: series.index + 1,
        numSeries: chart.series && chart.series.length,
        numPoints: series.points && series.points.length,
        series: series
    }, combinationSuffix = chartTypes.length > 1 ? 'Combination' : '', summary = chart.langFormat('accessibility.series.summary.' + series.type + combinationSuffix, summaryContext) || chart.langFormat('accessibility.series.summary.default' + combinationSuffix, summaryContext);
    return summary + (description ? ' ' + description : '') + (shouldDescribeAxis('yAxis') ? ' ' + yAxisInfo : '') + (shouldDescribeAxis('xAxis') ? ' ' + xAxisInfo : '');
}
/**
 * Set a11y props on a series element: a role (depending on configuration),
 * tabindex, and an aria-label from the configured or default formatter.
 * @private
 * @param {Highcharts.Series} series
 * @param {Highcharts.HTMLDOMElement|Highcharts.SVGDOMElement} seriesElement
 */
function describeSeriesElement(series, seriesElement) {
    var seriesA11yOptions = series.options.accessibility || {};
    var a11yOptions = series.chart.options.accessibility;
    // Role handling: a group-only series is one image; otherwise only the
    // 'all' landmark verbosity setting adds a region role. No role else.
    if (seriesA11yOptions.exposeAsGroupOnly) {
        seriesElement.setAttribute('role', 'img');
    }
    else if (a11yOptions.landmarkVerbosity === 'all') {
        seriesElement.setAttribute('role', 'region');
    }
    seriesElement.setAttribute('tabindex', '-1');
    var label = a11yOptions.series.descriptionFormatter &&
        a11yOptions.series.descriptionFormatter(series) ||
        defaultSeriesDescriptionFormatter(series);
    seriesElement.setAttribute('aria-label', escapeStringForHTML(stripHTMLTags(label)));
}
/**
 * Put accessible info on series and points of a series.
 * @param {Highcharts.Series} series The series to add info on.
 */
function describeSeries(series) {
    var chart = series.chart, firstPointEl = getSeriesFirstPointElement(series), seriesEl = getSeriesA11yElement(series), is3d = chart.is3d && chart.is3d();
    if (seriesEl) {
        // For some series types the order of elements do not match the
        // order of points in series. In that case we have to reverse them
        // in order for AT to read them out in an understandable order.
        // Due to z-index issues we can not do this for 3D charts.
        if (seriesEl.lastChild === firstPointEl && !is3d) {
            reverseChildNodes(seriesEl);
        }
        describePointsInSeries(series);
        unhideChartElementFromAT(chart, seriesEl);
        if (shouldDescribeSeriesElement(series)) {
            describeSeriesElement(series, seriesEl);
        }
        else {
            // Clear any stale label rather than leaving an outdated one.
            seriesEl.setAttribute('aria-label', '');
        }
    }
}
// Composite object exposing the public series-description API of this module.
var SeriesDescriber = {
    describeSeries: describeSeries,
    defaultPointDescriptionFormatter: defaultPointDescriptionFormatter,
    defaultSeriesDescriptionFormatter: defaultSeriesDescriptionFormatter,
    getPointA11yTimeDescription: getPointA11yTimeDescription,
    getPointXDescription: getPointXDescription,
    getPointValue: getPointValue,
    getPointValueDescription: getPointValueDescription
};
export default SeriesDescriber;
| {
"content_hash": "b85e04cf1dfc5df24a99e79ddac78313",
"timestamp": "",
"source": "github",
"line_count": 396,
"max_line_length": 403,
"avg_line_length": 42.79040404040404,
"alnum_prop": 0.6934198878725287,
"repo_name": "cdnjs/cdnjs",
"id": "452e1e760bb5c110660717a18e8d3f504bd725ce",
"size": "16946",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "ajax/libs/highcharts/8.0.3/es-modules/modules/accessibility/components/SeriesComponent/SeriesDescriber.js",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
package org.apache.geode.internal.cache.ha;
import static org.apache.geode.cache.Region.SEPARATOR;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.internal.AvailablePortHelper.getRandomAvailableTCPPort;
import static org.apache.geode.test.dunit.Assert.assertEquals;
import static org.apache.geode.test.dunit.Assert.assertNotNull;
import static org.apache.geode.test.dunit.Assert.assertTrue;
import static org.apache.geode.test.dunit.Assert.fail;
import java.net.ConnectException;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.cache.InterestResultPolicy;
import org.apache.geode.cache.Operation;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.cache.util.CacheListenerAdapter;
import org.apache.geode.cache30.ClientServerTestCase;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.FilterRoutingInfo.FilterInfo;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.RegionEventImpl;
import org.apache.geode.internal.cache.tier.sockets.CacheClientNotifier;
import org.apache.geode.internal.cache.tier.sockets.ClientTombstoneMessage;
import org.apache.geode.internal.cache.tier.sockets.ConflationDistributedTestHelper;
import org.apache.geode.internal.cache.versions.VersionSource;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.Invoke;
import org.apache.geode.test.dunit.NetworkUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.Wait;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.ClientSubscriptionTest;
/**
 * Client is connected to S1 which has a slow dispatcher. Puts are made on S1. Then S2 is started
 * and made available for the client. After that, S1's server is stopped. The client fails over to
 * S2. The client should receive all the puts. These puts have arrived on S2 via GII of HARegion.
 */
@Category({ClientSubscriptionTest.class})
public class HAGIIDistributedTest extends JUnit4DistributedTestCase {

  private static Cache cache = null;

  // server VMs
  private static VM server0 = null;
  private static VM server1 = null;

  // client VM
  private static VM client0 = null;

  private static final String REGION_NAME = HAGIIDistributedTest.class.getSimpleName() + "_region";

  // Listener installed in the client VM; records which updates arrived.
  protected static GIIChecker checker = new GIIChecker();

  // Port reserved up front for the second server, so the client pool can be
  // configured with it before that server is actually started.
  private int PORT2;

  @Override
  public final void postSetUp() throws Exception {
    final Host host = Host.getHost(0);
    // servers
    server0 = host.getVM(0);
    server1 = host.getVM(1);
    // client
    client0 = host.getVM(2);
    // start server1 with a slow dispatcher so that events queue up in the HARegion
    int PORT1 = server0.invoke(HAGIIDistributedTest::createServer1Cache);
    server0.invoke(() -> ConflationDistributedTestHelper.setIsSlowStart());
    server0.invoke(HAGIIDistributedTest::setSystemProperty);
    PORT2 = getRandomAvailableTCPPort();
    // Start the client, configured with both server ports
    client0
        .invoke(() -> HAGIIDistributedTest.createClientCache(NetworkUtils.getServerHostName(host),
            PORT1, PORT2));
    client0.invoke(() -> checker.resetUpdateCounter());
  }

  /**
   * Drives the failover scenario: create/register on the client, put on S1,
   * start S2, stop S1, then verify the client received the updates via S2.
   */
  @Test
  public void testGIIRegionQueue() {
    try (IgnoredException ignoredException =
        IgnoredException.addIgnoredException(ConnectException.class)) {
      client0.invoke(HAGIIDistributedTest::createEntries);
      client0.invoke(HAGIIDistributedTest::registerInterestList);
      server0.invoke(HAGIIDistributedTest::put);
      server0.invoke(HAGIIDistributedTest::tombstonegc);
      client0.invoke(HAGIIDistributedTest::verifyEntries);
      server1.invoke(HAGIIDistributedTest.class, "createServer2Cache", new Object[] {PORT2});
      Wait.pause(6000);
      server0.invoke(HAGIIDistributedTest::stopServer);
      // pause(10000);
      client0.invoke(HAGIIDistributedTest::verifyEntriesAfterGiiViaListener);
    }
  }

  /** Create a fresh cache in this VM, reconnecting the distributed system first. */
  public void createCache(Properties props) throws Exception {
    DistributedSystem ds = getSystem(props);
    ds.disconnect();
    ds = getSystem(props);
    assertNotNull(ds);
    cache = CacheFactory.create(ds);
    assertNotNull(cache);
  }

  /** Create the client cache with a pool pointing at both server ports. */
  public static void createClientCache(String host, Integer port1, Integer port2) throws Exception {
    int PORT1 = port1;
    int PORT2 = port2;
    Properties props = new Properties();
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(LOCATORS, "");
    new HAGIIDistributedTest().createCache(props);
    AttributesFactory factory = new AttributesFactory();
    ClientServerTestCase.configureConnectionPool(factory, host, new int[] {PORT1, PORT2}, true, -1,
        2, null, 1000, -1, -1);
    factory.setScope(Scope.DISTRIBUTED_ACK);
    factory.addCacheListener(HAGIIDistributedTest.checker);
    RegionAttributes attrs = factory.create();
    cache.createRegion(REGION_NAME, attrs);
  }

  /** Create the first server's cache/region and return its cache server port. */
  public static Integer createServer1Cache() throws Exception {
    new HAGIIDistributedTest().createCache(new Properties());
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.DISTRIBUTED_ACK);
    factory.setDataPolicy(DataPolicy.REPLICATE);
    RegionAttributes attrs = factory.create();
    cache.createRegion(REGION_NAME, attrs);
    int port = getRandomAvailableTCPPort();
    CacheServer server1 = cache.addCacheServer();
    server1.setPort(port);
    server1.setNotifyBySubscription(true);
    server1.start();
    return server1.getPort();
  }

  /** Create the second server's cache/region on the pre-reserved port. */
  public static void createServer2Cache(Integer port) throws Exception {
    new HAGIIDistributedTest().createCache(new Properties());
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.DISTRIBUTED_ACK);
    factory.setDataPolicy(DataPolicy.REPLICATE);
    RegionAttributes attrs = factory.create();
    cache.createRegion(REGION_NAME, attrs);
    CacheServer server1 = cache.addCacheServer();
    server1.setPort(port);
    server1.setNotifyBySubscription(true);
    server1.start();
  }

  /** Register interest in the three test keys so updates are pushed to the client. */
  public static void registerInterestList() {
    try {
      Region r = cache.getRegion(SEPARATOR + REGION_NAME);
      assertNotNull(r);
      r.registerInterest("key-1", InterestResultPolicy.KEYS_VALUES);
      r.registerInterest("key-2", InterestResultPolicy.KEYS_VALUES);
      r.registerInterest("key-3", InterestResultPolicy.KEYS_VALUES);
    } catch (Exception ex) {
      Assert.fail("failed while registering keys ", ex);
    }
  }

  /** Create the three entries with value == key (updated later by put()). */
  public static void createEntries() {
    try {
      Region r = cache.getRegion(SEPARATOR + REGION_NAME);
      assertNotNull(r);
      r.create("key-1", "key-1");
      r.create("key-2", "key-2");
      r.create("key-3", "key-3");
    } catch (Exception ex) {
      Assert.fail("failed while createEntries()", ex);
    }
  }

  /** Stop the (first) cache server in this VM, if any. */
  public static void stopServer() {
    try {
      Iterator iter = cache.getCacheServers().iterator();
      if (iter.hasNext()) {
        CacheServer server = (CacheServer) iter.next();
        server.stop();
      }
    } catch (Exception e) {
      fail("failed while stopServer()", e);
    }
  }

  /** Update the three entries to value-N; these travel via the HARegion queue. */
  public static void put() {
    try {
      Region r = cache.getRegion(SEPARATOR + REGION_NAME);
      assertNotNull(r);
      r.put("key-1", "value-1");
      r.put("key-2", "value-2");
      r.put("key-3", "value-3");
    } catch (Exception ex) {
      Assert.fail("failed while r.put()", ex);
    }
  }

  /** queue a tombstone GC message for the client. See bug #46832 */
  public static void tombstonegc() throws Exception {
    LocalRegion r = (LocalRegion) cache.getRegion(SEPARATOR + REGION_NAME);
    assertNotNull(r);
    DistributedMember id = r.getCache().getDistributedSystem().getDistributedMember();
    RegionEventImpl regionEvent = new RegionEventImpl(r, Operation.REGION_DESTROY, null, true, id);
    FilterInfo clientRouting = r.getFilterProfile().getLocalFilterRouting(regionEvent);
    assertTrue(clientRouting.getInterestedClients().size() > 0);
    regionEvent.setLocalFilterInfo(clientRouting);
    Map<VersionSource<?>, Long> map = Collections.emptyMap();
    ClientTombstoneMessage message =
        ClientTombstoneMessage.gc(r, map, new EventID(r.getCache().getDistributedSystem()));
    CacheClientNotifier.notifyClients(regionEvent, message);
  }

  /**
   * Verify the client still sees the original created values (the slow
   * dispatcher on S1 should have prevented the puts from arriving yet).
   */
  public static void verifyEntries() {
    try {
      final Region r = cache.getRegion(SEPARATOR + REGION_NAME);
      assertNotNull(r);
      // wait until key-1 still holds its created value
      WaitCriterion ev = new WaitCriterion() {
        @Override
        public boolean done() {
          return r.getEntry("key-1").getValue().equals("key-1");
        }

        @Override
        public String description() {
          return null;
        }
      };
      // BUG FIX: the first criterion was previously constructed but never
      // awaited, so key-1 was not actually verified before it was overwritten.
      GeodeAwaitility.await().untilAsserted(ev);
      // wait until key-2 still holds its created value
      ev = new WaitCriterion() {
        @Override
        public boolean done() {
          return r.getEntry("key-2").getValue().equals("key-2");
        }

        @Override
        public String description() {
          return null;
        }
      };
      GeodeAwaitility.await().untilAsserted(ev);
      // wait until key-3 still holds its created value
      ev = new WaitCriterion() {
        @Override
        public boolean done() {
          return r.getEntry("key-3").getValue().equals("key-3");
        }

        @Override
        public String description() {
          return null;
        }
      };
      GeodeAwaitility.await().untilAsserted(ev);
    } catch (Exception ex) {
      Assert.fail("failed while verifyEntries()", ex);
    }
  }

  /**
   * After failover, verify via the client's CacheListener that exactly the
   * three expected updates (and no more) arrived.
   */
  public static void verifyEntriesAfterGiiViaListener() {
    // Check whether just the 3 expected updates arrive.
    WaitCriterion ev = new WaitCriterion() {
      @Override
      public boolean done() {
        return checker.gotFirst();
      }

      @Override
      public String description() {
        return null;
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);

    ev = new WaitCriterion() {
      @Override
      public boolean done() {
        return checker.gotSecond();
      }

      @Override
      public String description() {
        return null;
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);

    ev = new WaitCriterion() {
      @Override
      public boolean done() {
        return checker.gotThird();
      }

      @Override
      public String description() {
        return null;
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);

    assertEquals(3, checker.getUpdates());
  }

  /** Verify the updated values are visible on the client after GII. */
  public static void verifyEntriesAfterGII() {
    try {
      final Region r = cache.getRegion(SEPARATOR + REGION_NAME);
      assertNotNull(r);
      // wait until key-1 shows the updated value
      WaitCriterion ev = new WaitCriterion() {
        @Override
        public boolean done() {
          return r.getEntry("key-1").getValue().equals("value-1");
        }

        @Override
        public String description() {
          return null;
        }
      };
      GeodeAwaitility.await().untilAsserted(ev);
      // wait until key-2 shows the updated value
      ev = new WaitCriterion() {
        @Override
        public boolean done() {
          return r.getEntry("key-2").getValue().equals("value-2");
        }

        @Override
        public String description() {
          return null;
        }
      };
      GeodeAwaitility.await().untilAsserted(ev);
      // wait until key-3 shows the updated value
      ev = new WaitCriterion() {
        @Override
        public boolean done() {
          return r.getEntry("key-3").getValue().equals("value-3");
        }

        @Override
        public String description() {
          return null;
        }
      };
      GeodeAwaitility.await().untilAsserted(ev);
    } catch (Exception ex) {
      Assert.fail("failed while verifyEntriesAfterGII()", ex);
    }
  }

  /** Slow down the message dispatcher in this server VM for the test. */
  public static void setSystemProperty() {
    System.setProperty("slowStartTimeForTesting", "120000");
  }

  @Override
  public final void preTearDown() throws Exception {
    ConflationDistributedTestHelper.unsetIsSlowStart();
    Invoke.invokeInEveryVM(ConflationDistributedTestHelper.class, "unsetIsSlowStart");
    // close the clients first
    client0.invoke(HAGIIDistributedTest::closeCache);
    // then close the servers
    server0.invoke(HAGIIDistributedTest::closeCache);
    server1.invoke(HAGIIDistributedTest::closeCache);
  }

  /** Close this VM's cache and disconnect from the distributed system. */
  public static void closeCache() {
    if (cache != null && !cache.isClosed()) {
      cache.close();
      cache.getDistributedSystem().disconnect();
    }
  }

  /**
   * CacheListener that records whether each of the three expected updates
   * arrived, and how many updates arrived in total.
   */
  private static class GIIChecker extends CacheListenerAdapter {

    private boolean gotFirst = false;
    private boolean gotSecond = false;
    private boolean gotThird = false;
    private int updates = 0;

    @Override
    public void afterUpdate(EntryEvent event) {
      updates++;
      String key = (String) event.getKey();
      String value = (String) event.getNewValue();
      if (key.equals("key-1") && value.equals("value-1")) {
        gotFirst = true;
      }
      if (key.equals("key-2") && value.equals("value-2")) {
        gotSecond = true;
      }
      if (key.equals("key-3") && value.equals("value-3")) {
        gotThird = true;
      }
    }

    public int getUpdates() {
      return updates;
    }

    public void resetUpdateCounter() {
      updates = 0;
    }

    public boolean gotFirst() {
      return gotFirst;
    }

    public boolean gotSecond() {
      return gotSecond;
    }

    public boolean gotThird() {
      return gotThird;
    }
  }
}
| {
"content_hash": "76d93ccd6c8cdf656c4a381f1d15f16e",
"timestamp": "",
"source": "github",
"line_count": 479,
"max_line_length": 100,
"avg_line_length": 31.32359081419624,
"alnum_prop": 0.676019728072514,
"repo_name": "jdeppe-pivotal/geode",
"id": "571a7329a9e576e01446a657911621cd82c56467",
"size": "15793",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "geode-core/src/distributedTest/java/org/apache/geode/internal/cache/ha/HAGIIDistributedTest.java",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "104031"
},
{
"name": "Dockerfile",
"bytes": "15956"
},
{
"name": "Go",
"bytes": "40709"
},
{
"name": "Groovy",
"bytes": "41916"
},
{
"name": "HTML",
"bytes": "4037680"
},
{
"name": "Java",
"bytes": "33151406"
},
{
"name": "JavaScript",
"bytes": "1780821"
},
{
"name": "Python",
"bytes": "29801"
},
{
"name": "Ruby",
"bytes": "1801"
},
{
"name": "SCSS",
"bytes": "2677"
},
{
"name": "Shell",
"bytes": "275617"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<!--
Copyright 2016 - 2017 Aitu Software Limited.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
<appender name="console" class="org.apache.log4j.ConsoleAppender">
<param name="Target" value="System.out"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d [%t] %-5p %c - %m%n"/>
</layout>
</appender>
<logger name="org.flywaydb">
<level value="warn"/>
</logger>
<logger name="org.eclipse.jetty">
<level value="warn"/>
</logger>
<logger name="com.aitusoftware">
<level value="debug"/>
</logger>
<root>
<priority value ="debug" />
<appender-ref ref="console" />
</root>
</log4j:configuration> | {
"content_hash": "fc898f8efff02a49872d42d566168de6",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 76,
"avg_line_length": 35.625,
"alnum_prop": 0.6575438596491228,
"repo_name": "aitusoftware/flute",
"id": "1b9dd80ba2fcb9056a58f5f003c3cbad78334104",
"size": "1425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flute-server/src/main/resources/logging/log4j.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1992"
},
{
"name": "HTML",
"bytes": "8938"
},
{
"name": "Java",
"bytes": "385573"
},
{
"name": "JavaScript",
"bytes": "44114"
},
{
"name": "Shell",
"bytes": "8669"
}
],
"symlink_target": ""
} |
Code-signing private key notes
==
The private keys for these certificates were generated on Gavin's main work machine,
following the certificate authorities' recommendations for generating certificate
signing requests.
For OSX, the private key was generated by Keychain.app on Gavin's main work machine.
The key and certificate is in a separate, passphrase-protected keychain file that is
unlocked to sign the Octocoin-Qt.app bundle.
For Windows, the private key was generated by Firefox running on Gavin's main work machine.
The key and certificate were exported into a separate, passphrase-protected PKCS#12 file, and
then deleted from Firefox's keystore. The exported file is used to sign the Windows setup.exe.
Threat analysis
--
Gavin is a single point of failure. He could be coerced to divulge the secret signing keys,
allowing somebody to distribute a Octocoin-Qt.app or bitcoin-qt-setup.exe with a valid
signature but containing a malicious binary.
Or the machine Gavin uses to sign the binaries could be compromised, either remotely or
by breaking in to his office, allowing the attacker to get the private key files and then
install a keylogger to get the passphrase that protects them.
Threat Mitigation
--
"Air gapping" the machine used to do the signing will not work, because the signing
process needs to access a timestamp server over the network. And it would not
prevent the "rubber hose cryptography" threat (coercing Gavin to sign a bad binary
or divulge the private keys).
Windows binaries are reproducibly 'gitian-built', and the setup.exe file created
by the NSIS installer system is a 7zip archive, so you could check to make sure
that the bitcoin-qt.exe file inside the installer had not been tampered with.
However, an attacker could modify the installer's code, so when the setup.exe
was run it compromised users' systems. A volunteer to write an auditing tool
that checks the setup.exe for tampering, and checks the files in it against
the list of gitian signatures, is needed.
The long-term solution is something like the 'gitian downloader' system, which
uses signatures from multiple developers to determine whether or not a binary
should be trusted. However, that just pushes the problem to "how will
non-technical users securely get the gitian downloader code to start?"
| {
"content_hash": "d91a8662a1c19a6666798bc8ad3563f6",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 94,
"avg_line_length": 50.28260869565217,
"alnum_prop": 0.8050151318633809,
"repo_name": "octocoin-project/octocoin",
"id": "7aebc50da87c5e85f0342edd2892ee529037347d",
"size": "2313",
"binary": false,
"copies": "2",
"ref": "refs/heads/master-0.10",
"path": "share/certs/PrivateKeyNotes.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "7639"
},
{
"name": "C",
"bytes": "343076"
},
{
"name": "C++",
"bytes": "3530423"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "18088"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "Makefile",
"bytes": "63617"
},
{
"name": "Objective-C",
"bytes": "2023"
},
{
"name": "Objective-C++",
"bytes": "7240"
},
{
"name": "Protocol Buffer",
"bytes": "2309"
},
{
"name": "Python",
"bytes": "212602"
},
{
"name": "QMake",
"bytes": "2020"
},
{
"name": "Shell",
"bytes": "40452"
}
],
"symlink_target": ""
} |
#import "SDImageCache.h"
#import "SDWebImageDecoder.h"
#import "UIImage+MultiFormat.h"
#import <CommonCrypto/CommonDigest.h>
// Default maximum age for disk-cached images.
static const NSInteger TFDefaultCacheMaxCacheAge = 60 * 60 * 24 * 7; // 1 week

// First eight bytes of every PNG file (http://www.w3.org/TR/PNG-Structure.html):
// 137 80 78 71 13 10 26 10 (decimal).
static unsigned char kPNGSignatureBytes[8] = {0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A};
static NSData *kPNGSignatureData = nil;

BOOL ImageDataHasPNGPreffix(NSData *data);

// Returns YES iff `data` begins with the PNG file signature.
// The signature NSData is built lazily (and thread-safely) here instead of only
// inside +sharedImageCache: previously, a cache created directly via
// -initWithNamespace: left kPNGSignatureData nil, so this check always returned
// NO and PNG images were re-encoded as JPEG, losing transparency.
BOOL ImageDataHasPNGPreffix(NSData *data) {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        kPNGSignatureData = [NSData dataWithBytes:kPNGSignatureBytes length:8];
    });
    NSUInteger pngSignatureLength = [kPNGSignatureData length];
    if ([data length] >= pngSignatureLength) {
        if ([[data subdataWithRange:NSMakeRange(0, pngSignatureLength)] isEqualToData:kPNGSignatureData]) {
            return YES;
        }
    }
    return NO;
}
@interface SDImageCache ()
// In-memory image cache, keyed by the same cache key as the disk cache.
@property (strong, nonatomic) NSCache *memCache;
// Absolute path of the default disk-cache directory for this namespace.
@property (strong, nonatomic) NSString *diskCachePath;
// Additional read-only search paths registered via -addReadOnlyCachePath:.
@property (strong, nonatomic) NSMutableArray *customPaths;
// Serial queue on which all disk I/O (and _fileManager access) is performed.
@property (SDDispatchQueueSetterSementics, nonatomic) dispatch_queue_t ioQueue;
@end
@implementation SDImageCache {
    // Dedicated NSFileManager; created and used on the serial ioQueue.
    NSFileManager *_fileManager;
}

// Singleton accessor. Also seeds kPNGSignatureData for ImageDataHasPNGPreffix().
+ (SDImageCache *)sharedImageCache {
    static dispatch_once_t once;
    static id instance;
    dispatch_once(&once, ^{
        instance = [self new];
        kPNGSignatureData = [NSData dataWithBytes:kPNGSignatureBytes length:8]; // build PNG signature once
    });
    return instance;
}

// Plain -init falls back to the "default" namespace.
- (id)init {
    return [self initWithNamespace:@"default"];
}

// Designated initializer: sets up the memory cache, the serial I/O queue, the
// disk-cache directory under Caches/, and (on iOS) app lifecycle observers.
- (id)initWithNamespace:(NSString *)ns {
    if ((self = [super init])) {
        NSString *fullNamespace = [@"com.hackemist.SDWebImageCache." stringByAppendingString:ns];
        // Create IO serial queue
        _ioQueue = dispatch_queue_create("com.hackemist.SDWebImageCache", DISPATCH_QUEUE_SERIAL);
        // Init default values
        _maxCacheAge = TFDefaultCacheMaxCacheAge;
        // Init the memory cache
        _memCache = [[NSCache alloc] init];
        _memCache.name = fullNamespace;
        // Init the disk cache
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
        _diskCachePath = [paths[0] stringByAppendingPathComponent:fullNamespace];
        // Create the file manager on the ioQueue so it is only ever touched there.
        dispatch_sync(_ioQueue, ^{
            _fileManager = [NSFileManager new];
        });
#if TARGET_OS_IPHONE
        // Subscribe to app events
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(clearMemory)
                                                     name:UIApplicationDidReceiveMemoryWarningNotification
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(cleanDisk)
                                                     name:UIApplicationWillTerminateNotification
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(backgroundCleanDisk)
                                                     name:UIApplicationDidEnterBackgroundNotification
                                                   object:nil];
#endif
    }
    return self;
}

- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    // NOTE(review): SDDispatchQueueRelease is presumably a no-op under ARC with
    // modern deployment targets — confirm against the macro definition.
    SDDispatchQueueRelease(_ioQueue);
}

// Registers an extra directory that is searched (read-only) when loading from disk.
- (void)addReadOnlyCachePath:(NSString *)path {
    if (!self.customPaths) {
        self.customPaths = [NSMutableArray new];
    }
    if (![self.customPaths containsObject:path]) {
        [self.customPaths addObject:path];
    }
}

// Full file path for `key` inside an arbitrary directory `path`.
- (NSString *)cachePathForKey:(NSString *)key inPath:(NSString *)path {
    NSString *filename = [self cachedFileNameForKey:key];
    return [path stringByAppendingPathComponent:filename];
}

// Full file path for `key` inside the default disk-cache directory.
- (NSString *)defaultCachePathForKey:(NSString *)key {
    return [self cachePathForKey:key inPath:self.diskCachePath];
}

#pragma mark SDImageCache (private)

// Filename on disk: lowercase hex MD5 digest of the key (nil key hashes as "").
- (NSString *)cachedFileNameForKey:(NSString *)key {
    const char *str = [key UTF8String];
    if (str == NULL) {
        str = "";
    }
    unsigned char r[CC_MD5_DIGEST_LENGTH];
    CC_MD5(str, (CC_LONG)strlen(str), r);
    NSString *filename = [NSString stringWithFormat:@"%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x",
                          r[0], r[1], r[2], r[3], r[4], r[5], r[6], r[7], r[8], r[9], r[10], r[11], r[12], r[13], r[14], r[15]];
    return filename;
}

#pragma mark ImageCache

// Stores `image` in the memory cache and, if `toDisk`, asynchronously writes it
// to the disk cache on the ioQueue. When `recalculate` is set (or no raw data
// was supplied) the image is re-encoded — PNG if the original data carries the
// PNG signature (or no data is available), JPEG otherwise.
- (void)storeImage:(UIImage *)image recalculateFromImage:(BOOL)recalculate imageData:(NSData *)imageData forKey:(NSString *)key toDisk:(BOOL)toDisk {
    if (!image || !key) {
        return;
    }
    // NSCache cost: approximate decoded pixel count.
    [self.memCache setObject:image forKey:key cost:image.size.height * image.size.width * image.scale];
    if (toDisk) {
        dispatch_async(self.ioQueue, ^{
            NSData *data = imageData;
            if (image && (recalculate || !data)) {
#if TARGET_OS_IPHONE
                // We need to determine if the image is a PNG or a JPEG
                // PNGs are easier to detect because they have a unique signature (http://www.w3.org/TR/PNG-Structure.html)
                // The first eight bytes of a PNG file always contain the following (decimal) values:
                // 137 80 78 71 13 10 26 10
                // We assume the image is PNG, in case the imageData is nil (i.e. if trying to save a UIImage directly),
                // we will consider it PNG to avoid loosing the transparency
                BOOL imageIsPng = YES;
                // But if we have an image data, we will look at the preffix
                if ([imageData length] >= [kPNGSignatureData length]) {
                    imageIsPng = ImageDataHasPNGPreffix(imageData);
                }
                if (imageIsPng) {
                    data = UIImagePNGRepresentation(image);
                }
                else {
                    data = UIImageJPEGRepresentation(image, (CGFloat)1.0);
                }
#else
                // On OS X, re-encode as JPEG from the image representations.
                data = [NSBitmapImageRep representationOfImageRepsInArray:image.representations usingType: NSJPEGFileType properties:nil];
#endif
            }
            if (data) {
                // Create the cache directory lazily on first write.
                if (![_fileManager fileExistsAtPath:_diskCachePath]) {
                    [_fileManager createDirectoryAtPath:_diskCachePath withIntermediateDirectories:YES attributes:nil error:NULL];
                }
                [_fileManager createFileAtPath:[self defaultCachePathForKey:key] contents:data attributes:nil];
            }
        });
    }
}

// Convenience: store to memory and disk, re-encoding the image.
- (void)storeImage:(UIImage *)image forKey:(NSString *)key {
    [self storeImage:image recalculateFromImage:YES imageData:nil forKey:key toDisk:YES];
}

// Convenience: store to memory and optionally disk, re-encoding the image.
- (void)storeImage:(UIImage *)image forKey:(NSString *)key toDisk:(BOOL)toDisk {
    [self storeImage:image recalculateFromImage:YES imageData:nil forKey:key toDisk:toDisk];
}

// Synchronous disk-existence check (default cache path only, not custom paths).
- (BOOL)diskImageExistsWithKey:(NSString *)key {
    BOOL exists = NO;
    // this is an exception to access the filemanager on another queue than ioQueue, but we are using the shared instance
    // from apple docs on NSFileManager: The methods of the shared NSFileManager object can be called from multiple threads safely.
    exists = [[NSFileManager defaultManager] fileExistsAtPath:[self defaultCachePathForKey:key]];
    return exists;
}

// Asynchronous disk-existence check; completion is delivered on the main queue.
- (void)diskImageExistsWithKey:(NSString *)key completion:(SDWebImageCheckCacheCompletionBlock)completionBlock {
    dispatch_async(_ioQueue, ^{
        BOOL exists = [_fileManager fileExistsAtPath:[self defaultCachePathForKey:key]];
        if (completionBlock) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completionBlock(exists);
            });
        }
    });
}

// Memory-cache lookup only; nil on miss.
- (UIImage *)imageFromMemoryCacheForKey:(NSString *)key {
    return [self.memCache objectForKey:key];
}

// Synchronous lookup: memory first, then disk; a disk hit is promoted into the
// memory cache. Returns nil when not cached anywhere.
- (UIImage *)imageFromDiskCacheForKey:(NSString *)key {
    // First check the in-memory cache...
    UIImage *image = [self imageFromMemoryCacheForKey:key];
    if (image) {
        // NSLog(@"image");
        return image;
    }
    // Second check the disk cache...
    UIImage *diskImage = [self diskImageForKey:key];
    if (diskImage) {
        // NSLog(@"diskImage");
        CGFloat cost = diskImage.size.height * diskImage.size.width * diskImage.scale;
        [self.memCache setObject:diskImage forKey:key cost:cost];
    }
    return diskImage;
}

// Raw file data for `key`: default cache directory first, then every
// registered read-only custom path; nil when the file exists nowhere.
- (NSData *)diskImageDataBySearchingAllPathsForKey:(NSString *)key {
    NSString *defaultPath = [self defaultCachePathForKey:key];
    NSData *data = [NSData dataWithContentsOfFile:defaultPath];
    if (data) {
        return data;
    }
    for (NSString *path in self.customPaths) {
        NSString *filePath = [self cachePathForKey:key inPath:path];
        NSData *imageData = [NSData dataWithContentsOfFile:filePath];
        if (imageData) {
            return imageData;
        }
    }
    return nil;
}

// Loads, scales and force-decodes the on-disk image for `key`; nil on miss.
- (UIImage *)diskImageForKey:(NSString *)key {
    NSData *data = [self diskImageDataBySearchingAllPathsForKey:key];
    if (data) {
        UIImage *image = [UIImage sd_imageWithData:data];
        image = [self scaledImageForKey:key image:image];
        image = [UIImage decodedImageWithImage:image];
        /* UIImage's imageWithData: only decompresses the data into ARGB pixels
           at draw time, so every draw would pay the decode cost again — cheap on
           memory but slow. To improve efficiency, SDWebImageDecoder decompresses
           the data-backed image up front onto a new bitmap, so the new image
           never needs decoding again. A classic space-for-time trade-off.
         */
        return image;
    }
    else {
        return nil;
    }
}

// Applies the @2x/@3x scale implied by the key's filename, if any.
- (UIImage *)scaledImageForKey:(NSString *)key image:(UIImage *)image {
    return SDScaledImageForKey(key, image);
}

// Async lookup: checks memory synchronously (doneBlock called inline, returns
// nil), otherwise reads from disk on the ioQueue, promotes a hit into the
// memory cache, and calls doneBlock on the main queue. The returned
// NSOperation only acts as a cancellation token.
- (NSOperation *)queryDiskCacheForKey:(NSString *)key done:(SDWebImageQueryCompletedBlock)doneBlock {
    if (!doneBlock) {
        return nil;
    }
    if (!key) {
        doneBlock(nil, SDImageCacheTypeNone);
        return nil;
    }
    // First check the in-memory cache...
    UIImage *image = [self imageFromMemoryCacheForKey:key];
    if (image) {
        doneBlock(image, SDImageCacheTypeMemory);
        return nil;
    }
    NSOperation *operation = [NSOperation new];
    dispatch_async(self.ioQueue, ^{
        if (operation.isCancelled) {
            return;
        }
        @autoreleasepool {
            UIImage *diskImage = [self diskImageForKey:key];
            if (diskImage) {
                CGFloat cost = diskImage.size.height * diskImage.size.width * diskImage.scale;
                [self.memCache setObject:diskImage forKey:key cost:cost];
            }
            dispatch_async(dispatch_get_main_queue(), ^{
                doneBlock(diskImage, SDImageCacheTypeDisk);
            });
        }
    });
    return operation;
}

// Removes the image for `key` from memory and disk.
- (void)removeImageForKey:(NSString *)key {
    [self removeImageForKey:key withCompletion:nil];
}

// Removes from memory and disk; `completion` runs on the main queue.
- (void)removeImageForKey:(NSString *)key withCompletion:(SDWebImageNoParamsBlock)completion {
    [self removeImageForKey:key fromDisk:YES withCompletion:completion];
}

// Removes from memory, and from disk when `fromDisk` is set.
- (void)removeImageForKey:(NSString *)key fromDisk:(BOOL)fromDisk {
    [self removeImageForKey:key fromDisk:fromDisk withCompletion:nil];
}

// Designated removal. NOTE(review): when `key` is nil the method returns early
// and `completion` is never invoked — confirm callers tolerate this.
- (void)removeImageForKey:(NSString *)key fromDisk:(BOOL)fromDisk withCompletion:(SDWebImageNoParamsBlock)completion {
    if (key == nil) {
        return;
    }
    [self.memCache removeObjectForKey:key];
    if (fromDisk) {
        dispatch_async(self.ioQueue, ^{
            [_fileManager removeItemAtPath:[self defaultCachePathForKey:key] error:nil];
            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion();
                });
            }
        });
    } else if (completion){
        completion();
    }
}

// Forwards the cost limit to the underlying NSCache.
- (void)setMaxMemoryCost:(NSUInteger)maxMemoryCost {
    self.memCache.totalCostLimit = maxMemoryCost;
}

- (NSUInteger)maxMemoryCost {
    return self.memCache.totalCostLimit;
}

// Empties the in-memory cache (also wired to memory-warning notifications).
- (void)clearMemory {
    [self.memCache removeAllObjects];
}

// Deletes the entire disk cache directory.
- (void)clearDisk {
    [self clearDiskOnCompletion:nil];
}

// Deletes and recreates the disk cache directory on the ioQueue;
// `completion` runs on the main queue.
- (void)clearDiskOnCompletion:(SDWebImageNoParamsBlock)completion
{
    dispatch_async(self.ioQueue, ^{
        [_fileManager removeItemAtPath:self.diskCachePath error:nil];
        [_fileManager createDirectoryAtPath:self.diskCachePath
                withIntermediateDirectories:YES
                                 attributes:nil
                                      error:NULL];
        if (completion) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completion();
            });
        }
    });
}

// Expires old files and enforces maxCacheSize (also wired to app termination).
- (void)cleanDisk {
    [self cleanDiskWithCompletionBlock:nil];
}

// Two-pass cleanup on the ioQueue: (1) delete files older than maxCacheAge;
// (2) if the remainder still exceeds maxCacheSize, delete oldest-first until
// the cache is at half of maxCacheSize. `completionBlock` runs on the main queue.
- (void)cleanDiskWithCompletionBlock:(SDWebImageNoParamsBlock)completionBlock {
    dispatch_async(self.ioQueue, ^{
        NSURL *diskCacheURL = [NSURL fileURLWithPath:self.diskCachePath isDirectory:YES];
        NSArray *resourceKeys = @[NSURLIsDirectoryKey, NSURLContentModificationDateKey, NSURLTotalFileAllocatedSizeKey];
        // This enumerator prefetches useful properties for our cache files.
        NSDirectoryEnumerator *fileEnumerator = [_fileManager enumeratorAtURL:diskCacheURL
                                                   includingPropertiesForKeys:resourceKeys
                                                                      options:NSDirectoryEnumerationSkipsHiddenFiles
                                                                 errorHandler:NULL];
        NSDate *expirationDate = [NSDate dateWithTimeIntervalSinceNow:-self.maxCacheAge];
        NSMutableDictionary *cacheFiles = [NSMutableDictionary dictionary];
        NSUInteger currentCacheSize = 0;
        // Enumerate all of the files in the cache directory.  This loop has two purposes:
        //
        //  1. Removing files that are older than the expiration date.
        //  2. Storing file attributes for the size-based cleanup pass.
        NSMutableArray *urlsToDelete = [[NSMutableArray alloc] init];
        for (NSURL *fileURL in fileEnumerator) {
            NSDictionary *resourceValues = [fileURL resourceValuesForKeys:resourceKeys error:NULL];
            // Skip directories.
            if ([resourceValues[NSURLIsDirectoryKey] boolValue]) {
                continue;
            }
            // Remove files that are older than the expiration date;
            NSDate *modificationDate = resourceValues[NSURLContentModificationDateKey];
            if ([[modificationDate laterDate:expirationDate] isEqualToDate:expirationDate]) {
                [urlsToDelete addObject:fileURL];
                continue;
            }
            // Store a reference to this file and account for its total size.
            NSNumber *totalAllocatedSize = resourceValues[NSURLTotalFileAllocatedSizeKey];
            currentCacheSize += [totalAllocatedSize unsignedIntegerValue];
            [cacheFiles setObject:resourceValues forKey:fileURL];
        }
        for (NSURL *fileURL in urlsToDelete) {
            [_fileManager removeItemAtURL:fileURL error:nil];
        }
        // If our remaining disk cache exceeds a configured maximum size, perform a second
        // size-based cleanup pass.  We delete the oldest files first.
        if (self.maxCacheSize > 0 && currentCacheSize > self.maxCacheSize) {
            // Target half of our maximum cache size for this cleanup pass.
            const NSUInteger desiredCacheSize = self.maxCacheSize / 2;
            // Sort the remaining cache files by their last modification time (oldest first).
            NSArray *sortedFiles = [cacheFiles keysSortedByValueWithOptions:NSSortConcurrent
                                                            usingComparator:^NSComparisonResult(id obj1, id obj2) {
                                                                return [obj1[NSURLContentModificationDateKey] compare:obj2[NSURLContentModificationDateKey]];
                                                            }];
            // Delete files until we fall below our desired cache size.
            for (NSURL *fileURL in sortedFiles) {
                if ([_fileManager removeItemAtURL:fileURL error:nil]) {
                    NSDictionary *resourceValues = cacheFiles[fileURL];
                    NSNumber *totalAllocatedSize = resourceValues[NSURLTotalFileAllocatedSizeKey];
                    currentCacheSize -= [totalAllocatedSize unsignedIntegerValue];
                    if (currentCacheSize < desiredCacheSize) {
                        break;
                    }
                }
            }
        }
        if (completionBlock) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completionBlock();
            });
        }
    });
}

// Runs cleanDisk inside a background task so it can finish after the app is
// backgrounded (wired to UIApplicationDidEnterBackgroundNotification).
- (void)backgroundCleanDisk {
    UIApplication *application = [UIApplication sharedApplication];
    __block UIBackgroundTaskIdentifier bgTask = [application beginBackgroundTaskWithExpirationHandler:^{
        // Clean up any unfinished task business by marking where you
        // stopped or ending the task outright.
        [application endBackgroundTask:bgTask];
        bgTask = UIBackgroundTaskInvalid;
    }];
    // Start the long-running task and return immediately.
    [self cleanDiskWithCompletionBlock:^{
        [application endBackgroundTask:bgTask];
        bgTask = UIBackgroundTaskInvalid;
    }];
}

// Total size in bytes of the default disk cache. Blocks the caller
// (dispatch_sync onto ioQueue — must not be called from the ioQueue itself).
- (NSUInteger)getSize {
    __block NSUInteger size = 0;
    dispatch_sync(self.ioQueue, ^{
        NSDirectoryEnumerator *fileEnumerator = [_fileManager enumeratorAtPath:self.diskCachePath];
        for (NSString *fileName in fileEnumerator) {
            NSString *filePath = [self.diskCachePath stringByAppendingPathComponent:fileName];
            NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfItemAtPath:filePath error:nil];
            size += [attrs fileSize];
        }
    });
    return size;
}

// Number of entries in the default disk cache. Blocks the caller
// (dispatch_sync onto ioQueue — must not be called from the ioQueue itself).
- (NSUInteger)getDiskCount {
    __block NSUInteger count = 0;
    dispatch_sync(self.ioQueue, ^{
        NSDirectoryEnumerator *fileEnumerator = [_fileManager enumeratorAtPath:self.diskCachePath];
        count = [[fileEnumerator allObjects] count];
    });
    return count;
}

// Asynchronously computes file count and total byte size of the default disk
// cache; `completionBlock` runs on the main queue.
- (void)calculateSizeWithCompletionBlock:(SDWebImageCalculateSizeBlock)completionBlock {
    NSURL *diskCacheURL = [NSURL fileURLWithPath:self.diskCachePath isDirectory:YES];
    dispatch_async(self.ioQueue, ^{
        NSUInteger fileCount = 0;
        NSUInteger totalSize = 0;
        NSDirectoryEnumerator *fileEnumerator = [_fileManager enumeratorAtURL:diskCacheURL
                                                   includingPropertiesForKeys:@[NSFileSize]
                                                                      options:NSDirectoryEnumerationSkipsHiddenFiles
                                                                 errorHandler:NULL];
        for (NSURL *fileURL in fileEnumerator) {
            NSNumber *fileSize;
            [fileURL getResourceValue:&fileSize forKey:NSURLFileSizeKey error:NULL];
            totalSize += [fileSize unsignedIntegerValue];
            fileCount += 1;
        }
        if (completionBlock) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completionBlock(fileCount, totalSize);
            });
        }
    });
}

@end
| {
"content_hash": "2e1ba582138391d973e5861d87477aba",
"timestamp": "",
"source": "github",
"line_count": 539,
"max_line_length": 157,
"avg_line_length": 35.90909090909091,
"alnum_prop": 0.6186515112374064,
"repo_name": "xsd0720/QQMusic",
"id": "bc8df50bd96386a361b9d1973fff386853d78bc6",
"size": "19835",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "QQMusic/Lib/SDWebImage/SDImageCache.m",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "40695"
},
{
"name": "Objective-C",
"bytes": "1584063"
}
],
"symlink_target": ""
} |
package com.haulmont.cuba.cascadedelete;
import com.haulmont.cuba.core.EntityManager;
import com.haulmont.cuba.core.Transaction;
import com.haulmont.cuba.core.global.AppBeans;
import com.haulmont.cuba.core.global.Metadata;
import com.haulmont.cuba.core.sys.listener.EntityListenerManager;
import com.haulmont.cuba.testmodel.cascadedelete.CascadeEntity;
import com.haulmont.cuba.testsupport.TestContainer;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
/**
 * Verifies CUBA's cascade behavior over a four-level self-referencing chain of
 * {@code CascadeEntity} (root <- first <- second <- third): cascade delete removes
 * the whole subtree, and entity listeners fire for every cascaded member on both
 * delete and update.
 */
public class CascadeDeleteTest {
    @ClassRule
    public static TestContainer cont = TestContainer.Common.INSTANCE;
    // Chain of parent/child entities created in setUp (root is the topmost parent).
    protected CascadeEntity root, first, second, third;
    @Before
    public void setUp() throws Exception {
        // Persist the chain root <- first <- second <- third in one transaction.
        Transaction tx = cont.persistence().createTransaction();
        try {
            EntityManager em = cont.persistence().getEntityManager();
            Metadata metadata = cont.metadata();
            root = metadata.create(CascadeEntity.class);
            root.setName("root");
            em.persist(root);
            first = metadata.create(CascadeEntity.class);
            first.setName("first");
            first.setFather(root);
            em.persist(first);
            second = metadata.create(CascadeEntity.class);
            second.setName("second");
            second.setFather(first);
            em.persist(second);
            third = metadata.create(CascadeEntity.class);
            third.setName("third");
            third.setFather(second);
            em.persist(third);
            tx.commit();
        } finally {
            tx.end();
        }
    }
    @After
    public void tearDown() throws Exception {
        // Delete children before parents to satisfy FK constraints.
        cont.deleteRecord(third, second, first, root);
    }
    @Test
    public void testRemoveCascade() throws Exception {
        // Removing the root must cascade-delete the entire chain.
        try (Transaction tx = cont.persistence().createTransaction()) {
            EntityManager em = cont.persistence().getEntityManager();
            CascadeEntity loadedRoot = em.find(CascadeEntity.class, root.getId());
            // NOTE(review): 'first' is loaded into the persistence context before the
            // remove — presumably required for the cascade to reach it; confirm.
            em.find(CascadeEntity.class, first.getId());
            em.remove(loadedRoot);
            tx.commit();
        }
        try (Transaction tx = cont.persistence().createTransaction()) {
            EntityManager em = cont.persistence().getEntityManager();
            List r = em.createQuery("select e from test$CascadeEntity e where e.id in ?1")
                    .setParameter(1, Arrays.asList(root, first, second, third))
                    .getResultList();
            assertEquals(0, r.size());
            tx.commit();
        }
    }
    @Test
    public void testEntityListenerOnCascadeDelete() throws Exception {
        // The delete listener must fire for the removed entity AND its cascaded child.
        EntityListenerManager entityListenerManager = AppBeans.get(EntityListenerManager.class);
        entityListenerManager.addListener(CascadeEntity.class, DeleteCascadeEntityListener.class);
        try (Transaction tx = cont.persistence().createTransaction()) {
            EntityManager em = cont.persistence().getEntityManager();
            CascadeEntity loadedSecond = em.find(CascadeEntity.class, second.getId());
            em.find(CascadeEntity.class, third.getId());
            em.remove(loadedSecond);
            tx.commit();
        }
        entityListenerManager.removeListener(CascadeEntity.class, DeleteCascadeEntityListener.class);
        assertEquals(2, DeleteCascadeEntityListener.deletedEvents.size());
        assertTrue(DeleteCascadeEntityListener.deletedEvents.contains(second.getId().toString()));
        assertTrue(DeleteCascadeEntityListener.deletedEvents.contains(third.getId().toString()));
    }
    @Test
    public void testEntityListenerOnUpdate() throws Exception {
        // Updating 'third' must also produce an update event for its loaded parent 'second'.
        EntityListenerManager entityListenerManager = AppBeans.get(EntityListenerManager.class);
        entityListenerManager.addListener(CascadeEntity.class, UpdateCascadeEntityListener.class);
        try (Transaction tx = cont.persistence().createTransaction()) {
            EntityManager em = cont.persistence().getEntityManager();
            CascadeEntity loadedThird = em.find(CascadeEntity.class, third.getId());
            // NOTE(review): loadedSecond looks unused, but loading it into the persistence
            // context is presumably what makes the update cascade to 'second' — confirm
            // before removing.
            CascadeEntity loadedSecond = em.find(CascadeEntity.class, second.getId());
            loadedThird.setName("third#1");
            tx.commit();
        }
        entityListenerManager.removeListener(CascadeEntity.class, UpdateCascadeEntityListener.class);
        assertEquals(2, UpdateCascadeEntityListener.updatedEvents.size());
        assertTrue(UpdateCascadeEntityListener.updatedEvents.contains(second.getId().toString()));
        assertTrue(UpdateCascadeEntityListener.updatedEvents.contains(third.getId().toString()));
    }
}
| {
"content_hash": "4169c4d0983aa02c5228eb049a041c86",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 101,
"avg_line_length": 40.29661016949152,
"alnum_prop": 0.6702418506834911,
"repo_name": "dimone-kun/cuba",
"id": "61299aaa54e83f53ed80fcd56243b657481f4782",
"size": "5356",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/core/test/com/haulmont/cuba/cascadedelete/CascadeDeleteTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "77"
},
{
"name": "CSS",
"bytes": "262124"
},
{
"name": "FreeMarker",
"bytes": "3996"
},
{
"name": "GAP",
"bytes": "33866"
},
{
"name": "Groovy",
"bytes": "402320"
},
{
"name": "HTML",
"bytes": "6405"
},
{
"name": "Java",
"bytes": "18662263"
},
{
"name": "PLSQL",
"bytes": "30350"
},
{
"name": "PLpgSQL",
"bytes": "1723"
},
{
"name": "SQLPL",
"bytes": "93321"
},
{
"name": "Shell",
"bytes": "88"
},
{
"name": "XSLT",
"bytes": "63258"
}
],
"symlink_target": ""
} |
package policyinsights
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"encoding/json"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/date"
)
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// PolicyStatesResource enumerates the values for policy states resource.
type PolicyStatesResource string

const (
	// Default maps to the "default" policy-states resource segment.
	Default PolicyStatesResource = "default"
	// Latest maps to the "latest" policy-states resource segment.
	Latest PolicyStatesResource = "latest"
)
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// PossiblePolicyStatesResourceValues returns an array of possible values for the PolicyStatesResource const type.
func PossiblePolicyStatesResourceValues() []PolicyStatesResource {
	values := make([]PolicyStatesResource, 0, 2)
	values = append(values, Default, Latest)
	return values
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// Operation operation definition.
// Fields are pointers so that absent JSON properties unmarshal as nil.
type Operation struct {
	// Name - Operation name.
	Name *string `json:"name,omitempty"`
	// Display - Display metadata associated with the operation.
	Display *OperationDisplay `json:"display,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// OperationDisplay display metadata associated with the operation.
// Fields are pointers so that absent JSON properties unmarshal as nil.
type OperationDisplay struct {
	// Provider - Resource provider name.
	Provider *string `json:"provider,omitempty"`
	// Resource - Resource name on which the operation is performed.
	Resource *string `json:"resource,omitempty"`
	// Operation - Operation name.
	Operation *string `json:"operation,omitempty"`
	// Description - Operation description.
	Description *string `json:"description,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// OperationsListResults list of available operations.
// Embeds the autorest HTTP response (excluded from JSON via the "-" tag).
type OperationsListResults struct {
	autorest.Response `json:"-"`
	// OdataCount - OData entity count; represents the number of operations returned.
	OdataCount *int32 `json:"@odata.count,omitempty"`
	// Value - List of available operations.
	Value *[]Operation `json:"value,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// PolicyAssignmentSummary policy assignment summary.
type PolicyAssignmentSummary struct {
	// PolicyAssignmentID - Policy assignment ID.
	PolicyAssignmentID *string `json:"policyAssignmentId,omitempty"`
	// PolicySetDefinitionID - Policy set definition ID, if the policy assignment is for a policy set.
	PolicySetDefinitionID *string `json:"policySetDefinitionId,omitempty"`
	// Results - Non-compliance summary for the policy assignment.
	Results *SummaryResults `json:"results,omitempty"`
	// PolicyDefinitions - Policy definitions summary.
	PolicyDefinitions *[]PolicyDefinitionSummary `json:"policyDefinitions,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// PolicyDefinitionSummary policy definition summary.
// All fields are optional pointers and are omitted from JSON when nil.
type PolicyDefinitionSummary struct {
	// PolicyDefinitionID - Policy definition ID.
	PolicyDefinitionID *string `json:"policyDefinitionId,omitempty"`
	// Effect - Policy effect, i.e. policy definition action.
	Effect *string `json:"effect,omitempty"`
	// Results - Non-compliance summary for the policy definition.
	Results *SummaryResults `json:"results,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// PolicyEvent policy event record.
// Serialized via the custom MarshalJSON below so that AdditionalProperties
// entries are flattened into the top-level JSON object.
type PolicyEvent struct {
	// AdditionalProperties - Unmatched properties from the message are deserialized to this collection.
	AdditionalProperties map[string]interface{} `json:""`
	// OdataID - OData entity ID; always set to null since policy event records do not have an entity ID.
	OdataID *string `json:"@odata.id,omitempty"`
	// OdataContext - OData context string; used by OData clients to resolve type information based on metadata.
	OdataContext *string `json:"@odata.context,omitempty"`
	// Timestamp - Timestamp for the policy event record.
	Timestamp *date.Time `json:"timestamp,omitempty"`
	// ResourceID - Resource ID.
	ResourceID *string `json:"resourceId,omitempty"`
	// PolicyAssignmentID - Policy assignment ID.
	PolicyAssignmentID *string `json:"policyAssignmentId,omitempty"`
	// PolicyDefinitionID - Policy definition ID.
	PolicyDefinitionID *string `json:"policyDefinitionId,omitempty"`
	// EffectiveParameters - Effective parameters for the policy assignment.
	EffectiveParameters *string `json:"effectiveParameters,omitempty"`
	// IsCompliant - Flag which states whether the resource is compliant against the policy assignment it was evaluated against.
	IsCompliant *bool `json:"isCompliant,omitempty"`
	// SubscriptionID - Subscription ID.
	SubscriptionID *string `json:"subscriptionId,omitempty"`
	// ResourceType - Resource type.
	ResourceType *string `json:"resourceType,omitempty"`
	// ResourceLocation - Resource location.
	ResourceLocation *string `json:"resourceLocation,omitempty"`
	// ResourceGroup - Resource group name.
	ResourceGroup *string `json:"resourceGroup,omitempty"`
	// ResourceTags - List of resource tags.
	ResourceTags *string `json:"resourceTags,omitempty"`
	// PolicyAssignmentName - Policy assignment name.
	PolicyAssignmentName *string `json:"policyAssignmentName,omitempty"`
	// PolicyAssignmentOwner - Policy assignment owner.
	PolicyAssignmentOwner *string `json:"policyAssignmentOwner,omitempty"`
	// PolicyAssignmentParameters - Policy assignment parameters.
	PolicyAssignmentParameters *string `json:"policyAssignmentParameters,omitempty"`
	// PolicyAssignmentScope - Policy assignment scope.
	PolicyAssignmentScope *string `json:"policyAssignmentScope,omitempty"`
	// PolicyDefinitionName - Policy definition name.
	PolicyDefinitionName *string `json:"policyDefinitionName,omitempty"`
	// PolicyDefinitionAction - Policy definition action, i.e. effect.
	PolicyDefinitionAction *string `json:"policyDefinitionAction,omitempty"`
	// PolicyDefinitionCategory - Policy definition category.
	PolicyDefinitionCategory *string `json:"policyDefinitionCategory,omitempty"`
	// PolicySetDefinitionID - Policy set definition ID, if the policy assignment is for a policy set.
	PolicySetDefinitionID *string `json:"policySetDefinitionId,omitempty"`
	// PolicySetDefinitionName - Policy set definition name, if the policy assignment is for a policy set.
	PolicySetDefinitionName *string `json:"policySetDefinitionName,omitempty"`
	// PolicySetDefinitionOwner - Policy set definition owner, if the policy assignment is for a policy set.
	PolicySetDefinitionOwner *string `json:"policySetDefinitionOwner,omitempty"`
	// PolicySetDefinitionCategory - Policy set definition category, if the policy assignment is for a policy set.
	PolicySetDefinitionCategory *string `json:"policySetDefinitionCategory,omitempty"`
	// PolicySetDefinitionParameters - Policy set definition parameters, if the policy assignment is for a policy set.
	PolicySetDefinitionParameters *string `json:"policySetDefinitionParameters,omitempty"`
	// ManagementGroupIds - Comma-separated list of management group IDs, which represent the hierarchy of the management groups the resource is under.
	ManagementGroupIds *string `json:"managementGroupIds,omitempty"`
	// PolicyDefinitionReferenceID - Reference ID for the policy definition inside the policy set, if the policy assignment is for a policy set.
	PolicyDefinitionReferenceID *string `json:"policyDefinitionReferenceId,omitempty"`
	// TenantID - Tenant ID for the policy event record.
	TenantID *string `json:"tenantId,omitempty"`
	// PrincipalOid - Principal object ID for the user who initiated the resource operation that triggered the policy event.
	PrincipalOid *string `json:"principalOid,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// MarshalJSON is the custom marshaler for PolicyEvent.
// Typed fields are emitted only when set; entries captured in
// AdditionalProperties are merged into the top-level object afterwards.
func (pe PolicyEvent) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// put records value under key only when the source pointer is non-nil.
	put := func(key string, value interface{}, present bool) {
		if present {
			objectMap[key] = value
		}
	}
	put("@odata.id", pe.OdataID, pe.OdataID != nil)
	put("@odata.context", pe.OdataContext, pe.OdataContext != nil)
	put("timestamp", pe.Timestamp, pe.Timestamp != nil)
	put("resourceId", pe.ResourceID, pe.ResourceID != nil)
	put("policyAssignmentId", pe.PolicyAssignmentID, pe.PolicyAssignmentID != nil)
	put("policyDefinitionId", pe.PolicyDefinitionID, pe.PolicyDefinitionID != nil)
	put("effectiveParameters", pe.EffectiveParameters, pe.EffectiveParameters != nil)
	put("isCompliant", pe.IsCompliant, pe.IsCompliant != nil)
	put("subscriptionId", pe.SubscriptionID, pe.SubscriptionID != nil)
	put("resourceType", pe.ResourceType, pe.ResourceType != nil)
	put("resourceLocation", pe.ResourceLocation, pe.ResourceLocation != nil)
	put("resourceGroup", pe.ResourceGroup, pe.ResourceGroup != nil)
	put("resourceTags", pe.ResourceTags, pe.ResourceTags != nil)
	put("policyAssignmentName", pe.PolicyAssignmentName, pe.PolicyAssignmentName != nil)
	put("policyAssignmentOwner", pe.PolicyAssignmentOwner, pe.PolicyAssignmentOwner != nil)
	put("policyAssignmentParameters", pe.PolicyAssignmentParameters, pe.PolicyAssignmentParameters != nil)
	put("policyAssignmentScope", pe.PolicyAssignmentScope, pe.PolicyAssignmentScope != nil)
	put("policyDefinitionName", pe.PolicyDefinitionName, pe.PolicyDefinitionName != nil)
	put("policyDefinitionAction", pe.PolicyDefinitionAction, pe.PolicyDefinitionAction != nil)
	put("policyDefinitionCategory", pe.PolicyDefinitionCategory, pe.PolicyDefinitionCategory != nil)
	put("policySetDefinitionId", pe.PolicySetDefinitionID, pe.PolicySetDefinitionID != nil)
	put("policySetDefinitionName", pe.PolicySetDefinitionName, pe.PolicySetDefinitionName != nil)
	put("policySetDefinitionOwner", pe.PolicySetDefinitionOwner, pe.PolicySetDefinitionOwner != nil)
	put("policySetDefinitionCategory", pe.PolicySetDefinitionCategory, pe.PolicySetDefinitionCategory != nil)
	put("policySetDefinitionParameters", pe.PolicySetDefinitionParameters, pe.PolicySetDefinitionParameters != nil)
	put("managementGroupIds", pe.ManagementGroupIds, pe.ManagementGroupIds != nil)
	put("policyDefinitionReferenceId", pe.PolicyDefinitionReferenceID, pe.PolicyDefinitionReferenceID != nil)
	put("tenantId", pe.TenantID, pe.TenantID != nil)
	put("principalOid", pe.PrincipalOid, pe.PrincipalOid != nil)
	// Flatten unmatched properties into the same top-level object.
	for k, v := range pe.AdditionalProperties {
		objectMap[k] = v
	}
	return json.Marshal(objectMap)
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// PolicyEventsQueryResults query results.
type PolicyEventsQueryResults struct {
	// Response - embedded raw HTTP response; excluded from JSON serialization.
	autorest.Response `json:"-"`
	// OdataContext - OData context string; used by OData clients to resolve type information based on metadata.
	OdataContext *string `json:"@odata.context,omitempty"`
	// OdataCount - OData entity count; represents the number of policy event records returned.
	OdataCount *int32 `json:"@odata.count,omitempty"`
	// Value - Query results.
	Value *[]PolicyEvent `json:"value,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// PolicyState policy state record.
// Serialized via the custom MarshalJSON below so that AdditionalProperties
// entries are flattened into the top-level JSON object.
type PolicyState struct {
	// AdditionalProperties - Unmatched properties from the message are deserialized to this collection.
	AdditionalProperties map[string]interface{} `json:""`
	// OdataID - OData entity ID; always set to null since policy state records do not have an entity ID.
	OdataID *string `json:"@odata.id,omitempty"`
	// OdataContext - OData context string; used by OData clients to resolve type information based on metadata.
	OdataContext *string `json:"@odata.context,omitempty"`
	// Timestamp - Timestamp for the policy state record.
	Timestamp *date.Time `json:"timestamp,omitempty"`
	// ResourceID - Resource ID.
	ResourceID *string `json:"resourceId,omitempty"`
	// PolicyAssignmentID - Policy assignment ID.
	PolicyAssignmentID *string `json:"policyAssignmentId,omitempty"`
	// PolicyDefinitionID - Policy definition ID.
	PolicyDefinitionID *string `json:"policyDefinitionId,omitempty"`
	// EffectiveParameters - Effective parameters for the policy assignment.
	EffectiveParameters *string `json:"effectiveParameters,omitempty"`
	// IsCompliant - Flag which states whether the resource is compliant against the policy assignment it was evaluated against.
	IsCompliant *bool `json:"isCompliant,omitempty"`
	// SubscriptionID - Subscription ID.
	SubscriptionID *string `json:"subscriptionId,omitempty"`
	// ResourceType - Resource type.
	ResourceType *string `json:"resourceType,omitempty"`
	// ResourceLocation - Resource location.
	ResourceLocation *string `json:"resourceLocation,omitempty"`
	// ResourceGroup - Resource group name.
	ResourceGroup *string `json:"resourceGroup,omitempty"`
	// ResourceTags - List of resource tags.
	ResourceTags *string `json:"resourceTags,omitempty"`
	// PolicyAssignmentName - Policy assignment name.
	PolicyAssignmentName *string `json:"policyAssignmentName,omitempty"`
	// PolicyAssignmentOwner - Policy assignment owner.
	PolicyAssignmentOwner *string `json:"policyAssignmentOwner,omitempty"`
	// PolicyAssignmentParameters - Policy assignment parameters.
	PolicyAssignmentParameters *string `json:"policyAssignmentParameters,omitempty"`
	// PolicyAssignmentScope - Policy assignment scope.
	PolicyAssignmentScope *string `json:"policyAssignmentScope,omitempty"`
	// PolicyDefinitionName - Policy definition name.
	PolicyDefinitionName *string `json:"policyDefinitionName,omitempty"`
	// PolicyDefinitionAction - Policy definition action, i.e. effect.
	PolicyDefinitionAction *string `json:"policyDefinitionAction,omitempty"`
	// PolicyDefinitionCategory - Policy definition category.
	PolicyDefinitionCategory *string `json:"policyDefinitionCategory,omitempty"`
	// PolicySetDefinitionID - Policy set definition ID, if the policy assignment is for a policy set.
	PolicySetDefinitionID *string `json:"policySetDefinitionId,omitempty"`
	// PolicySetDefinitionName - Policy set definition name, if the policy assignment is for a policy set.
	PolicySetDefinitionName *string `json:"policySetDefinitionName,omitempty"`
	// PolicySetDefinitionOwner - Policy set definition owner, if the policy assignment is for a policy set.
	PolicySetDefinitionOwner *string `json:"policySetDefinitionOwner,omitempty"`
	// PolicySetDefinitionCategory - Policy set definition category, if the policy assignment is for a policy set.
	PolicySetDefinitionCategory *string `json:"policySetDefinitionCategory,omitempty"`
	// PolicySetDefinitionParameters - Policy set definition parameters, if the policy assignment is for a policy set.
	PolicySetDefinitionParameters *string `json:"policySetDefinitionParameters,omitempty"`
	// ManagementGroupIds - Comma-separated list of management group IDs, which represent the hierarchy of the management groups the resource is under.
	ManagementGroupIds *string `json:"managementGroupIds,omitempty"`
	// PolicyDefinitionReferenceID - Reference ID for the policy definition inside the policy set, if the policy assignment is for a policy set.
	PolicyDefinitionReferenceID *string `json:"policyDefinitionReferenceId,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// MarshalJSON is the custom marshaler for PolicyState.
// Typed fields are emitted only when set; entries captured in
// AdditionalProperties are merged into the top-level object afterwards.
func (state PolicyState) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// put records value under key only when the source pointer is non-nil.
	put := func(key string, value interface{}, present bool) {
		if present {
			objectMap[key] = value
		}
	}
	put("@odata.id", state.OdataID, state.OdataID != nil)
	put("@odata.context", state.OdataContext, state.OdataContext != nil)
	put("timestamp", state.Timestamp, state.Timestamp != nil)
	put("resourceId", state.ResourceID, state.ResourceID != nil)
	put("policyAssignmentId", state.PolicyAssignmentID, state.PolicyAssignmentID != nil)
	put("policyDefinitionId", state.PolicyDefinitionID, state.PolicyDefinitionID != nil)
	put("effectiveParameters", state.EffectiveParameters, state.EffectiveParameters != nil)
	put("isCompliant", state.IsCompliant, state.IsCompliant != nil)
	put("subscriptionId", state.SubscriptionID, state.SubscriptionID != nil)
	put("resourceType", state.ResourceType, state.ResourceType != nil)
	put("resourceLocation", state.ResourceLocation, state.ResourceLocation != nil)
	put("resourceGroup", state.ResourceGroup, state.ResourceGroup != nil)
	put("resourceTags", state.ResourceTags, state.ResourceTags != nil)
	put("policyAssignmentName", state.PolicyAssignmentName, state.PolicyAssignmentName != nil)
	put("policyAssignmentOwner", state.PolicyAssignmentOwner, state.PolicyAssignmentOwner != nil)
	put("policyAssignmentParameters", state.PolicyAssignmentParameters, state.PolicyAssignmentParameters != nil)
	put("policyAssignmentScope", state.PolicyAssignmentScope, state.PolicyAssignmentScope != nil)
	put("policyDefinitionName", state.PolicyDefinitionName, state.PolicyDefinitionName != nil)
	put("policyDefinitionAction", state.PolicyDefinitionAction, state.PolicyDefinitionAction != nil)
	put("policyDefinitionCategory", state.PolicyDefinitionCategory, state.PolicyDefinitionCategory != nil)
	put("policySetDefinitionId", state.PolicySetDefinitionID, state.PolicySetDefinitionID != nil)
	put("policySetDefinitionName", state.PolicySetDefinitionName, state.PolicySetDefinitionName != nil)
	put("policySetDefinitionOwner", state.PolicySetDefinitionOwner, state.PolicySetDefinitionOwner != nil)
	put("policySetDefinitionCategory", state.PolicySetDefinitionCategory, state.PolicySetDefinitionCategory != nil)
	put("policySetDefinitionParameters", state.PolicySetDefinitionParameters, state.PolicySetDefinitionParameters != nil)
	put("managementGroupIds", state.ManagementGroupIds, state.ManagementGroupIds != nil)
	put("policyDefinitionReferenceId", state.PolicyDefinitionReferenceID, state.PolicyDefinitionReferenceID != nil)
	// Flatten unmatched properties into the same top-level object.
	for k, v := range state.AdditionalProperties {
		objectMap[k] = v
	}
	return json.Marshal(objectMap)
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// PolicyStatesQueryResults query results.
type PolicyStatesQueryResults struct {
	// Response - embedded raw HTTP response; excluded from JSON serialization.
	autorest.Response `json:"-"`
	// OdataContext - OData context string; used by OData clients to resolve type information based on metadata.
	OdataContext *string `json:"@odata.context,omitempty"`
	// OdataCount - OData entity count; represents the number of policy state records returned.
	OdataCount *int32 `json:"@odata.count,omitempty"`
	// Value - Query results.
	Value *[]PolicyState `json:"value,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// QueryFailure error response.
// Wrapper for the service's error payload; see QueryFailureError for details.
type QueryFailure struct {
	// Error - Error definition.
	Error *QueryFailureError `json:"error,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// QueryFailureError error definition.
type QueryFailureError struct {
	// Code - Service specific error code which serves as the substatus for the HTTP error code.
	Code *string `json:"code,omitempty"`
	// Message - Description of the error.
	Message *string `json:"message,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// String wraps a single string result returned by the service.
type String struct {
	// Response - embedded raw HTTP response; excluded from JSON serialization.
	autorest.Response `json:"-"`
	// Value - the string payload of the response.
	Value *string `json:"value,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// SummarizeResults summarize action results.
type SummarizeResults struct {
	// Response - embedded raw HTTP response; excluded from JSON serialization.
	autorest.Response `json:"-"`
	// OdataContext - OData context string; used by OData clients to resolve type information based on metadata.
	OdataContext *string `json:"@odata.context,omitempty"`
	// OdataCount - OData entity count; represents the number of summaries returned; always set to 1.
	OdataCount *int32 `json:"@odata.count,omitempty"`
	// Value - Summarize action results.
	Value *[]Summary `json:"value,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// Summary summary results.
// All fields are optional pointers and are omitted from JSON when nil.
type Summary struct {
	// OdataID - OData entity ID; always set to null since summaries do not have an entity ID.
	OdataID *string `json:"@odata.id,omitempty"`
	// OdataContext - OData context string; used by OData clients to resolve type information based on metadata.
	OdataContext *string `json:"@odata.context,omitempty"`
	// Results - Non-compliance summary for all policy assignments.
	Results *SummaryResults `json:"results,omitempty"`
	// PolicyAssignments - Policy assignments summary.
	PolicyAssignments *[]PolicyAssignmentSummary `json:"policyAssignments,omitempty"`
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/preview/policyinsights/mgmt/2017-12-12-preview/policyinsights instead.
// SummaryResults non-compliance summary on a particular summary level.
type SummaryResults struct {
	// QueryResultsURI - HTTP POST URI for queryResults action on Microsoft.PolicyInsights to retrieve raw results for the non-compliance summary.
	QueryResultsURI *string `json:"queryResultsUri,omitempty"`
	// NonCompliantResources - Number of non-compliant resources.
	NonCompliantResources *int32 `json:"nonCompliantResources,omitempty"`
	// NonCompliantPolicies - Number of non-compliant policies.
	NonCompliantPolicies *int32 `json:"nonCompliantPolicies,omitempty"`
}
| {
"content_hash": "7267f7b392f7e98296e72f82ea317f78",
"timestamp": "",
"source": "github",
"line_count": 494,
"max_line_length": 148,
"avg_line_length": 48.512145748987855,
"alnum_prop": 0.784978093052368,
"repo_name": "seuffert/rclone",
"id": "17b809c3e67df12147535ed8faddd2367cf2b3bb",
"size": "23965",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "vendor/github.com/Azure/azure-sdk-for-go/services/policyinsights/mgmt/2017-12-12-preview/policyinsights/models.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "258"
},
{
"name": "Go",
"bytes": "2003268"
},
{
"name": "HTML",
"bytes": "493675"
},
{
"name": "Makefile",
"bytes": "7245"
},
{
"name": "Python",
"bytes": "6913"
},
{
"name": "Roff",
"bytes": "446311"
},
{
"name": "Shell",
"bytes": "2612"
}
],
"symlink_target": ""
} |
<?php
// Test fixture: minimal stand-ins for Symfony and SensioFrameworkExtraBundle
// classes, so the plugin's security inspections can resolve these FQNs
// without a full vendor install. Names and signatures must stay as-is.
namespace Symfony\Component\Security\Core\Authorization
{
    /**
     * Stub of Symfony's authorization checker contract.
     */
    interface AuthorizationCheckerInterface
    {
        public function isGranted($attributes, $object = null);
    }
}
namespace Sensio\Bundle\FrameworkExtraBundle\Configuration
{
    /**
     * @Annotation
     */
    class Security
    {
    }
    /**
     * @Annotation
     */
    class IsGranted
    {
    }
}
| {
"content_hash": "edf17a73bce40c223663ba3c9ac7b6e5",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 63,
"avg_line_length": 14.807692307692308,
"alnum_prop": 0.6103896103896104,
"repo_name": "Haehnchen/idea-php-symfony2-plugin",
"id": "e1279f8495e2fafd8c0d3b7d560d66367b1e60f2",
"size": "385",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/fr/adrienbrault/idea/symfony2plugin/tests/security/fixtures/classes.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "166"
},
{
"name": "HTML",
"bytes": "65374"
},
{
"name": "Java",
"bytes": "4169732"
},
{
"name": "JavaScript",
"bytes": "2563"
},
{
"name": "Kotlin",
"bytes": "4091"
},
{
"name": "PHP",
"bytes": "106315"
},
{
"name": "Shell",
"bytes": "405"
},
{
"name": "Twig",
"bytes": "1148"
}
],
"symlink_target": ""
} |
/* List root: strips UA default margin; bullet style is themeable via variable. */
.root {
    margin: 0;
    list-style: var(--list-style);
}
/* [line] attribute: draws a separator under every item except the last. */
.root[line] .item:not(:last-child) {
    border-bottom: var(--list-item-border-bottom);
}
/* [striped] attribute: zebra background on odd items. */
.root[striped] .item:nth-of-type(odd) {
    background: var(--list-item-background-striped);
}
/* [hover] attribute: highlight the item under the pointer. */
.root[hover] .item:hover {
    background: var(--list-item-background-hover);
}
| {
"content_hash": "15d292fff2581f6c048b7b8221de25f0",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 52,
"avg_line_length": 20.5,
"alnum_prop": 0.6402439024390244,
"repo_name": "vusion/proto-ui",
"id": "37efcb9c731a065f7b520fd5c01ea8ae6f6a4dac",
"size": "328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/components/u-list.vue/module.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "104559"
},
{
"name": "HTML",
"bytes": "44645"
},
{
"name": "JavaScript",
"bytes": "351455"
},
{
"name": "Vue",
"bytes": "440"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html lang="en">
<head>
<meta content="text/html; charset=iso-8859-1" http-equiv="content-type">
<title>Total length of upstream channels</title>
<link rel="stylesheet" type="text/css" href="Help.css">
</head>
<body>
<h1>Total length of upstream channels</h1>
<p>
This tool can be used to calculate the total length of channels draining to
each grid cell in a raster stream network. The user must specify the names
of a streams raster image and <a href="FlowPointerD8.html">D8 pointer</a>
image. Stream cells are designated in the streams image as all positive,
nonzero values. Thus all
non-stream or background grid cells are commonly assigned either zeros or
<b><i>NoData</i></b> values. The pointer image is used to traverse the
stream network and should only be created using the
<a href="FlowPointerD8.html">D8 algorithm</a>. Background cells will be
assigned the <b><i>NoData</i></b> value in the output image. The output
raster is of an <i>integer</i> data type and <i>continuous</i> data scale.</p>
<h2 class="SeeAlso">See Also:</h2>
<ul>
<li><a href="FlowPointerD8.html">D8 Flow Pointer</a></li>
<li><a href="FurthestUpstreamChannelHeadDistance.html">Furthest Upstream Channel Head Distance</a></li>
</ul>
<h2 class="SeeAlso">Scripting:</h2>
<p>The following is an example of a Python script that uses this tool:</p>
<p style="background-color: rgb(240,240,240)">
<code>
wd = pluginHost.getWorkingDirectory() <br>
streamsFile = wd + "streams.dep" <br>
pointerFile = wd + "D8 pointer.dep" <br>
outputFile = wd + "output.dep" <br>
args = [streamsFile, pointerFile, outputFile] <br>
pluginHost.runPlugin("TotalLengthOfUpstreamChannels", args, False) <br>
</code>
</p>
<p>This is a Groovy script also using this tool:</p>
<p style="background-color: rgb(240,240,240)">
<code>
def wd = pluginHost.getWorkingDirectory() <br>
def streamsFile = wd + "streams.dep" <br>
def pointerFile = wd + "D8 pointer.dep" <br>
def outputFile = wd + "output.dep" <br>
String[] args = [streamsFile, pointerFile, outputFile] <br>
pluginHost.runPlugin("TotalLengthOfUpstreamChannels", args, false) <br>
</code>
</p>
<h2 class="SeeAlso">Credits:</h2>
<ul>
<li>John Lindsay (2012) email: [email protected]</li>
</ul>
</body>
</html>
| {
"content_hash": "9972ad2d15945ac11e3b53a656c5fee6",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 115,
"avg_line_length": 48.983333333333334,
"alnum_prop": 0.5794487921061585,
"repo_name": "jblindsay/jblindsay.github.io",
"id": "b26eb47432c240c3a81f604a0019d4d6a62ceecb",
"size": "2939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ghrg/Whitebox/WhiteboxGAT-linux/resources/Help/TotalLengthOfUpstreamChannels.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "52"
},
{
"name": "CSS",
"bytes": "1086446"
},
{
"name": "Groovy",
"bytes": "2186907"
},
{
"name": "HTML",
"bytes": "11454480"
},
{
"name": "Java",
"bytes": "5373007"
},
{
"name": "JavaScript",
"bytes": "1745824"
},
{
"name": "Python",
"bytes": "82583"
},
{
"name": "SCSS",
"bytes": "173472"
},
{
"name": "TeX",
"bytes": "15589"
}
],
"symlink_target": ""
} |
- Add support for iterable maps
- Fix to not try to unobserve `null` objects
## **1.1.2**
Fix NPM release
## **1.1.1**
Cleanup Sourcemaps
## **1.1.0**
Add browser bundle.
## **1.0.0**
Change how the path of a change is constructed. Now the parent(s) of each object are tracked, instead of the path directly. This is less error-prone, especially for arrays (array shifts etc.). | {
"content_hash": "bfacfea54a81482cd3e6f981265dcf1f",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 192,
"avg_line_length": 21.444444444444443,
"alnum_prop": 0.694300518134715,
"repo_name": "rkusa/nested-observe",
"id": "637a4f46dfb2bd7020951cb6a3cd0338c613f49e",
"size": "400",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CHANGELOG.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "32800"
}
],
"symlink_target": ""
} |
// SDLAmbientLightStatus.h
//
#import "SDLEnum.h"
/**
 * Reflects the status of the ambient light sensor for headlamps
 *
 * @since SDL 3.0
 */
typedef SDLEnum SDLAmbientLightStatus SDL_SWIFT_ENUM;
/**
 * Represents a "night" ambient light status
 */
extern SDLAmbientLightStatus const SDLAmbientLightStatusNight;
/**
 * Represents a "twilight 1" ambient light status
 */
extern SDLAmbientLightStatus const SDLAmbientLightStatusTwilight1;
/**
 * Represents a "twilight 2" ambient light status
 */
extern SDLAmbientLightStatus const SDLAmbientLightStatusTwilight2;
/**
 * Represents a "twilight 3" ambient light status
 */
extern SDLAmbientLightStatus const SDLAmbientLightStatusTwilight3;
/**
 * Represents a "twilight 4" ambient light status
 */
extern SDLAmbientLightStatus const SDLAmbientLightStatusTwilight4;
/**
 * Represents a "day" ambient light status
 */
extern SDLAmbientLightStatus const SDLAmbientLightStatusDay;
/**
 * Represents an "unknown" ambient light status
 */
extern SDLAmbientLightStatus const SDLAmbientLightStatusUnknown;
/**
 * Represents an "invalid" ambient light status
 */
extern SDLAmbientLightStatus const SDLAmbientLightStatusInvalid;
| {
"content_hash": "f864c2a66c3d8d4e22169b102288441d",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 66,
"avg_line_length": 22.673076923076923,
"alnum_prop": 0.7786259541984732,
"repo_name": "APCVSRepo/sdl_ios",
"id": "f4663ec616fb9a8b5a95d6dd8593a81e357509f2",
"size": "1179",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "SmartDeviceLink/SDLAmbientLightStatus.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1149"
},
{
"name": "HTML",
"bytes": "1587"
},
{
"name": "Objective-C",
"bytes": "3030944"
},
{
"name": "Ruby",
"bytes": "27487"
},
{
"name": "Shell",
"bytes": "240"
},
{
"name": "Swift",
"bytes": "73559"
}
],
"symlink_target": ""
} |
<!--
@MAC-ALLOW:invalid=*
@WIN-ALLOW:checkable:*
@WIN-DENY:name=''
-->
<html>
<body>
<div aria-invalid="true">invalid=true</div>
<div aria-invalid="spelling">invalid=spelling</div>
<div aria-invalid="grammar">invalid=grammar</div>
<div aria-invalid="false">invalid=false</div>
<div aria-invalid="">invalid=<empty></div>
<div aria-invalid="somethingelse">invalid=somethingelse</div>
</body>
</html>
| {
"content_hash": "a007b2cf5591e26627b3bc35bb606e57",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 63,
"avg_line_length": 27.733333333333334,
"alnum_prop": 0.6875,
"repo_name": "timopulkkinen/BubbleFish",
"id": "80d86ccd23bf354d46e47b5c1afed2d1ad7796cc",
"size": "416",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "content/test/data/accessibility/aria-invalid.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "1174304"
},
{
"name": "Awk",
"bytes": "9519"
},
{
"name": "C",
"bytes": "75801820"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "161884021"
},
{
"name": "DOT",
"bytes": "1559"
},
{
"name": "F#",
"bytes": "381"
},
{
"name": "Java",
"bytes": "3531849"
},
{
"name": "JavaScript",
"bytes": "18556005"
},
{
"name": "Logos",
"bytes": "4517"
},
{
"name": "Matlab",
"bytes": "5234"
},
{
"name": "Objective-C",
"bytes": "7254742"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "Perl",
"bytes": "933011"
},
{
"name": "Python",
"bytes": "8808682"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3621"
},
{
"name": "Shell",
"bytes": "1537764"
},
{
"name": "Tcl",
"bytes": "277077"
},
{
"name": "XML",
"bytes": "13493"
}
],
"symlink_target": ""
} |
<?php
namespace Drupal\Tests\views\Unit\Plugin\views\field;
use Drupal\Core\Entity\EntityRepositoryInterface;
use Drupal\Core\Entity\EntityTypeManagerInterface;
use Drupal\Tests\UnitTestCase;
use Drupal\views\Plugin\views\field\EntityOperations;
use Drupal\views\ResultRow;
/**
* @coversDefaultClass \Drupal\views\Plugin\views\field\EntityOperations
* @group Views
*/
class EntityOperationsUnitTest extends UnitTestCase {

  /**
   * The entity type manager.
   *
   * @var \Drupal\Core\Entity\EntityTypeManagerInterface|\PHPUnit\Framework\MockObject\MockObject
   */
  protected $entityTypeManager;

  /**
   * The entity repository.
   *
   * @var \Drupal\Core\Entity\EntityRepositoryInterface|\PHPUnit\Framework\MockObject\MockObject
   */
  protected $entityRepository;

  /**
   * The language manager.
   *
   * @var \Drupal\Core\Language\LanguageManagerInterface|\PHPUnit\Framework\MockObject\MockObject
   */
  protected $languageManager;

  /**
   * The plugin under test.
   *
   * @var \Drupal\views\Plugin\views\field\EntityOperations
   */
  protected $plugin;

  /**
   * {@inheritdoc}
   *
   * @covers ::__construct
   */
  protected function setUp(): void {
    // Let the base test case initialize its own state before building mocks.
    parent::setUp();
    $this->entityTypeManager = $this->createMock(EntityTypeManagerInterface::class);
    $this->entityRepository = $this->createMock(EntityRepositoryInterface::class);
    $this->languageManager = $this->createMock('\Drupal\Core\Language\LanguageManagerInterface');

    $configuration = [];
    $plugin_id = $this->randomMachineName();
    $plugin_definition = [
      'title' => $this->randomMachineName(),
    ];
    $this->plugin = new EntityOperations($configuration, $plugin_id, $plugin_definition, $this->entityTypeManager, $this->languageManager, $this->entityRepository);

    // The redirect destination is appended to operation links when the
    // 'destination' option is enabled; see testRenderWithDestination().
    $redirect_service = $this->createMock('Drupal\Core\Routing\RedirectDestinationInterface');
    $redirect_service->expects($this->any())
      ->method('getAsArray')
      ->willReturn(['destination' => 'foobar']);
    $this->plugin->setRedirectDestination($redirect_service);

    $view = $this->getMockBuilder('\Drupal\views\ViewExecutable')
      ->disableOriginalConstructor()
      ->getMock();
    $display = $this->getMockBuilder('\Drupal\views\Plugin\views\display\DisplayPluginBase')
      ->disableOriginalConstructor()
      ->getMockForAbstractClass();
    $view->display_handler = $display;
    $this->plugin->init($view, $display);
  }

  /**
   * @covers ::usesGroupBy
   */
  public function testUsesGroupBy() {
    $this->assertFalse($this->plugin->usesGroupBy());
  }

  /**
   * @covers ::defineOptions
   */
  public function testDefineOptions() {
    $options = $this->plugin->defineOptions();
    $this->assertIsArray($options);
    $this->assertArrayHasKey('destination', $options);
  }

  /**
   * Mocks an entity whose list builder yields a fixed set of operations.
   *
   * The entity type manager mock is primed to return the mocked list builder
   * for the entity's type exactly once. Shared by both render tests.
   *
   * @return array
   *   A two-element array: the mocked entity and the operations array.
   */
  protected function setUpEntityWithOperations() {
    $entity_type_id = $this->randomMachineName();
    $entity = $this->getMockBuilder('\Drupal\user\Entity\Role')
      ->disableOriginalConstructor()
      ->getMock();
    $entity->expects($this->any())
      ->method('getEntityTypeId')
      ->will($this->returnValue($entity_type_id));

    $operations = [
      'foo' => [
        'title' => $this->randomMachineName(),
      ],
    ];
    $list_builder = $this->createMock('\Drupal\Core\Entity\EntityListBuilderInterface');
    $list_builder->expects($this->once())
      ->method('getOperations')
      ->with($entity)
      ->will($this->returnValue($operations));

    $this->entityTypeManager->expects($this->once())
      ->method('getListBuilder')
      ->with($entity_type_id)
      ->will($this->returnValue($list_builder));

    return [$entity, $operations];
  }

  /**
   * @covers ::render
   */
  public function testRenderWithDestination() {
    list($entity, $operations) = $this->setUpEntityWithOperations();
    $this->plugin->options['destination'] = TRUE;

    $result = new ResultRow();
    $result->_entity = $entity;

    $expected_build = [
      '#type' => 'operations',
      '#links' => $operations,
    ];
    // With the 'destination' option enabled, the current-page destination is
    // appended to each operation link's query (see the redirect mock in setUp()).
    $expected_build['#links']['foo']['query'] = ['destination' => 'foobar'];
    $build = $this->plugin->render($result);
    $this->assertSame($expected_build, $build);
  }

  /**
   * @covers ::render
   */
  public function testRenderWithoutDestination() {
    list($entity, $operations) = $this->setUpEntityWithOperations();
    $this->plugin->options['destination'] = FALSE;

    $result = new ResultRow();
    $result->_entity = $entity;

    $expected_build = [
      '#type' => 'operations',
      '#links' => $operations,
    ];
    $build = $this->plugin->render($result);
    $this->assertSame($expected_build, $build);
  }

}
| {
"content_hash": "a45f385932a3068391f6573c669fa48b",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 164,
"avg_line_length": 29.468926553672315,
"alnum_prop": 0.6466641104294478,
"repo_name": "electric-eloquence/fepper-drupal",
"id": "b5daf4ab1da14eaedb641caea1d31d8ae641b604",
"size": "5216",
"binary": false,
"copies": "12",
"ref": "refs/heads/dev",
"path": "backend/drupal/core/modules/views/tests/src/Unit/Plugin/views/field/EntityOperationsUnitTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2300765"
},
{
"name": "HTML",
"bytes": "68444"
},
{
"name": "JavaScript",
"bytes": "2453602"
},
{
"name": "Mustache",
"bytes": "40698"
},
{
"name": "PHP",
"bytes": "41684915"
},
{
"name": "PowerShell",
"bytes": "755"
},
{
"name": "Shell",
"bytes": "72896"
},
{
"name": "Stylus",
"bytes": "32803"
},
{
"name": "Twig",
"bytes": "1820730"
},
{
"name": "VBScript",
"bytes": "466"
}
],
"symlink_target": ""
} |
require 'date'
module Dynamini
  # Mixin that adds typed-attribute handling to Dynamini model classes.
  #
  # `handle(:column, :format)` registers a column with a data type and defines
  # reader/writer methods for it; the GETTER_PROCS / SETTER_PROCS tables hold
  # the coercions between stored primitives and in-memory Ruby values.
  #
  # NOTE(review): this module uses Set but only `require 'date'` appears at the
  # top of this file — confirm `set` is required elsewhere (Ruby < 3.2 does not
  # autoload Set).
  module TypeHandler
    # Coercions applied when reading: raw stored value -> Ruby type.
    # Every proc passes nil input through as nil.
    GETTER_PROCS = {
      integer: proc { |v| v.to_i if v },
      # Dates are stored as numeric epoch timestamps; Date instances pass
      # through untouched.
      date: proc do |v|
        if v.is_a?(Date)
          v
        elsif v
          # Honor the application time zone when Time.zone exists (Rails /
          # ActiveSupport); otherwise fall back to the system zone.
          Time.methods.include?(:zone) ? Time.zone.at(v).to_date : Time.at(v).to_date
        end
      end,
      time: proc do |v|
        if v
          Time.methods.include?(:zone) ? Time.zone.at(v.to_f) : Time.at(v.to_f)
        end
      end,
      float: proc { |v| v.to_f if v },
      symbol: proc { |v| v.to_sym if v },
      string: proc { |v| v.to_s if v },
      boolean: proc { |v| v },
      # Non-enumerable values are wrapped in a single-element collection.
      array: proc { |v| (v.is_a?(Enumerable) ? v.to_a : [v]) if v },
      set: proc { |v| (v.is_a?(Enumerable) ? Set.new(v) : Set.new([v])) if v }
    }.freeze

    # Coercions applied when writing: Ruby value -> storable primitive
    # (times and dates become epoch floats, symbols become strings).
    SETTER_PROCS = {
      integer: proc { |v| v.to_i if v },
      time: proc { |v| (v.is_a?(Date) ? v.to_time : v).to_f if v },
      float: proc { |v| v.to_f if v },
      symbol: proc { |v| v.to_s if v },
      string: proc { |v| v.to_s if v },
      boolean: proc { |v| v },
      date: proc { |v| v.to_time.to_f if v },
      array: proc { |v| (v.is_a?(Enumerable) ? v.to_a : [v]) if v },
      set: proc { |v| (v.is_a?(Enumerable) ? Set.new(v) : Set.new([v])) if v }
    }.freeze

    # Registers +column+ as a typed attribute, records it in `handles`, and
    # defines its reader and writer. +options+ may carry :default and, for
    # collections, :of (the element type).
    def handle(column, format_class, options = {})
      validate_handle(format_class, options)
      options[:default] ||= format_default(format_class)
      # Redundant with format_default (which already yields Set.new for :set);
      # kept as a belt-and-braces default.
      options[:default] ||= Set.new if format_class == :set
      self.handles = self.handles.merge(column => { format: format_class, options: options })
      define_handled_getter(column, format_class, options)
      define_handled_setter(column, format_class)
    end

    # Defines the reader for +column+. The proc lookup serves only to reject
    # unsupported types here; the actual coercion is applied elsewhere
    # (read_attribute — not visible in this file).
    def define_handled_getter(column, format_class, _options = {})
      proc = GETTER_PROCS[format_class]
      fail 'Unsupported data type: ' + format_class.to_s if proc.nil?
      define_method(column) do
        read_attribute(column)
      end
    end

    # Defines the writer "+column+=". As with the getter, the proc lookup is
    # a validity check; write_attribute performs the conversion.
    def define_handled_setter(column, format_class)
      method_name = (column.to_s + '=')
      proc = SETTER_PROCS[format_class]
      fail 'Unsupported data type: ' + format_class.to_s if proc.nil?
      define_method(method_name) do |value|
        write_attribute(column, value)
      end
    end

    # Default value per format: empty collection for :array/:set, nil otherwise.
    def format_default(format_class)
      case format_class
      when :array
        []
      when :set
        Set.new
      end
    end

    # Rejects sets of non-primitive element types (sets of sets/arrays).
    def validate_handle(format, options)
      if format == :set
        if options[:of] && [:set, :array].include?(options[:of])
          raise ArgumentError, 'Invalid handle: cannot store non-primitive datatypes within a set.'
        end
      end
    end

    # Applies the getter coercion for +column+ to +value+ when the column is
    # handled; otherwise returns the value unchanged.
    def handled_key(column, value)
      if handles[column]
        attribute_callback(GETTER_PROCS, handles[column], value, false)
      else
        value
      end
    end

    # Core conversion: substitutes the default for nil, element-converts
    # typed collections, and optionally validates enumerability on write.
    def attribute_callback(procs, handle, value, validate)
      value = handle[:options][:default] if value.nil?
      callback = procs[handle[:format]]
      if should_convert_elements?(handle, value)
        result = convert_elements(value, procs[handle[:options][:of]])
        callback.call(result)
      elsif validate && invalid_enumerable_value?(handle, value)
        raise ArgumentError, "Can't write a non-enumerable value to field handled as #{handle[:format]}"
      else
        callback.call(value)
      end
    end

    # True when the handle declares an element type (:of) and the value is a
    # collection whose elements can be converted individually.
    def should_convert_elements?(handle, value)
      handle[:options][:of] && (value.is_a?(Array) || value.is_a?(Set))
    end

    # True when a scalar is being written to an :array/:set column.
    def invalid_enumerable_value?(handle, value)
      handled_as?(handle, [:array, :set]) && !value.is_a?(Enumerable)
    end

    # Maps +callback+ over every element of the collection.
    def convert_elements(enumerable, callback)
      enumerable.map { |e| callback.call(e) }
    end

    # True when the handle's format is one of the given +type+ symbols.
    def handled_as?(handle, type)
      type.include? handle[:format]
    end
  end
end
| {
"content_hash": "149ebb60a11a18c397601aad82d86149",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 104,
"avg_line_length": 31.056451612903224,
"alnum_prop": 0.5692028044663724,
"repo_name": "47colborne/dynamini",
"id": "cd7c8d23f7ccb5de0e7c34d440b3d92241187276",
"size": "3851",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/dynamini/type_handler.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "131772"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico" />
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0" />
<meta name="theme-color" content="#000000" />
<!--
manifest.json provides metadata used when your web app is added to the
homescreen on Android. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>Web FEM</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
</body>
</html>
| {
"content_hash": "a75445711eedd3316f80a15fda69025e",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 113,
"avg_line_length": 42.214285714285715,
"alnum_prop": 0.6590524534686971,
"repo_name": "gorankami/WebFEM",
"id": "a97d877e453359989f0272cf530e96b3f39b2141",
"size": "1182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/index.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1431"
},
{
"name": "GLSL",
"bytes": "954"
},
{
"name": "HTML",
"bytes": "5522"
},
{
"name": "JavaScript",
"bytes": "46164"
}
],
"symlink_target": ""
} |
#include <aws/swf/model/RequestCancelWorkflowExecutionRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::SWF::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
// Default-constructs the request with every member flagged as unset, so
// SerializePayload() omits all of them until setters are called.
RequestCancelWorkflowExecutionRequest::RequestCancelWorkflowExecutionRequest() :
    m_domainHasBeenSet(false),
    m_workflowIdHasBeenSet(false),
    m_runIdHasBeenSet(false)
{
}
// Builds the JSON request body, emitting only those members that have been
// explicitly set on this request (per the *HasBeenSet flags).
Aws::String RequestCancelWorkflowExecutionRequest::SerializePayload() const
{
  JsonValue payload;

  if (m_domainHasBeenSet) {
    payload.WithString("domain", m_domain);
  }
  if (m_workflowIdHasBeenSet) {
    payload.WithString("workflowId", m_workflowId);
  }
  if (m_runIdHasBeenSet) {
    payload.WithString("runId", m_runId);
  }

  return payload.WriteReadable();
}
// Supplies the X-Amz-Target header naming the SWF API action for this request.
Aws::Http::HeaderValueCollection RequestCancelWorkflowExecutionRequest::GetRequestSpecificHeaders() const
{
  Aws::Http::HeaderValueCollection headers;
  // FIX: the SWF action for this request type is RequestCancelWorkflowExecution;
  // the previous target ("SimpleWorkflowService.CancelWorkflowExecution") does
  // not name a valid SWF API action.
  headers.insert(Aws::Http::HeaderValuePair("X-Amz-Target", "SimpleWorkflowService.RequestCancelWorkflowExecution"));
  return headers;
}
| {
"content_hash": "ab09b63153ee7f00080e8d5b24891425",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 110,
"avg_line_length": 20.78846153846154,
"alnum_prop": 0.759481961147086,
"repo_name": "ambasta/aws-sdk-cpp",
"id": "79305510e7945f88f56829adad970739a49c1ad8",
"size": "1654",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-cpp-sdk-swf/source/model/RequestCancelWorkflowExecutionRequest.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2305"
},
{
"name": "C++",
"bytes": "74273816"
},
{
"name": "CMake",
"bytes": "412257"
},
{
"name": "Java",
"bytes": "229873"
},
{
"name": "Python",
"bytes": "62933"
}
],
"symlink_target": ""
} |
package rx.internal.operators;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import rx.Observer;
import rx.Subscriber;
import rx.functions.Action0;
import rx.observers.Subscribers;
import rx.subjects.Subject;
import rx.subscriptions.Subscriptions;
/**
* A solution to the "time gap" problem that occurs with {@code groupBy} and {@code pivot}.
* <p>
* This currently has temporary unbounded buffers. It needs to become bounded and then do one of two things:
* <ol>
* <li>blow up and make the user do something about it</li>
* <li>work with the backpressure solution ... still to be implemented (such as co-routines)</li>
* </ol><p>
* Generally the buffer should be very short lived (milliseconds) and then stops being involved. It can become a
* memory leak though if a {@code GroupedObservable} backed by this class is emitted but never subscribed to
* (such as filtered out). In that case, either a time-bomb to throw away the buffer, or just blowing up and
* making the user do something about it is needed.
* <p>
* For example, to filter out {@code GroupedObservable}s, perhaps they need a silent {@code subscribe()} on them
* to just blackhole the data.
* <p>
* This is an initial start at solving this problem and solves the immediate problem of {@code groupBy} and
* {@code pivot} and trades off the possibility of memory leak for deterministic functionality.
*
* @see <a href="https://github.com/Netflix/RxJava/issues/844">the Github issue describing the time gap problem</a>
* @warn type param "T" undescribed
* @param <T>
*/
public class BufferUntilSubscriber<T> extends Subject<T, T> {

    /**
     * Creates a {@code BufferUntilSubscriber} with fresh internal state: events
     * are buffered until the first (and only) subscriber arrives, then replayed
     * and passed through.
     *
     * @param <T> the value type
     * @return a new instance accepting exactly one subscriber
     */
    public static <T> BufferUntilSubscriber<T> create() {
        State<T> state = new State<T>();
        return new BufferUntilSubscriber<T>(state);
    }

    /** The common state. */
    static final class State<T> {
        /** The first observer or the one which buffers until the first arrives. */
        volatile Observer<? super T> observerRef = new BufferedObserver<T>();
        /** Allow a single subscriber only (0 = unclaimed, 1 = claimed). */
        volatile int first;
        /** Field updater for observerRef. */
        @SuppressWarnings("rawtypes")
        static final AtomicReferenceFieldUpdater<State, Observer> OBSERVER_UPDATER
                = AtomicReferenceFieldUpdater.newUpdater(State.class, Observer.class, "observerRef");
        /** Field updater for first. */
        @SuppressWarnings("rawtypes")
        static final AtomicIntegerFieldUpdater<State> FIRST_UPDATER
                = AtomicIntegerFieldUpdater.newUpdater(State.class, "first");

        /** Atomically claims the single-subscriber slot. */
        boolean casFirst(int expected, int next) {
            return FIRST_UPDATER.compareAndSet(this, expected, next);
        }

        /** Plain volatile write; used when no CAS race is possible. */
        void setObserverRef(Observer<? super T> o) {
            observerRef = o;
        }

        /** CAS swap of the current observer; used by PassThruObserver handoff. */
        boolean casObserverRef(Observer<? super T> expected, Observer<? super T> next) {
            return OBSERVER_UPDATER.compareAndSet(this, expected, next);
        }
    }

    /** Subscription logic: drains the buffer, then wires the real subscriber in. */
    static final class OnSubscribeAction<T> implements OnSubscribe<T> {
        final State<T> state;

        public OnSubscribeAction(State<T> state) {
            this.state = state;
        }

        @Override
        public void call(final Subscriber<? super T> s) {
            if (state.casFirst(0, 1)) {
                final NotificationLite<T> nl = NotificationLite.instance();
                // drain queued notifications before subscription
                // we do this here before PassThruObserver so the consuming thread can do this before putting itself in the line of the producer
                BufferedObserver<? super T> buffered = (BufferedObserver<? super T>)state.observerRef;
                Object o;
                while ((o = buffered.buffer.poll()) != null) {
                    nl.accept(s, o);
                }
                // register real observer for pass-thru ... and drain any further events received on first notification
                state.setObserverRef(new PassThruObserver<T>(s, buffered.buffer, state));
                // On unsubscribe, swap in an empty observer so further events are discarded.
                s.add(Subscriptions.create(new Action0() {
                    @Override
                    public void call() {
                        state.setObserverRef(Subscribers.empty());
                    }
                }));
            } else {
                s.onError(new IllegalStateException("Only one subscriber allowed!"));
            }
        }
    }

    final State<T> state;

    private BufferUntilSubscriber(State<T> state) {
        super(new OnSubscribeAction<T>(state));
        this.state = state;
    }

    // The producer side always forwards to whatever observer currently backs
    // the state: the buffering observer, the pass-thru, or the real subscriber.
    @Override
    public void onCompleted() {
        state.observerRef.onCompleted();
    }

    @Override
    public void onError(Throwable e) {
        state.observerRef.onError(e);
    }

    @Override
    public void onNext(T t) {
        state.observerRef.onNext(t);
    }

    /**
     * This is a temporary observer between buffering and the actual that gets into the line of notifications
     * from the producer and will drain the queue of any items received during the race of the initial drain and
     * switching this.
     *
     * It will then immediately swap itself out for the actual (after a single notification), but since this is
     * now being done on the same producer thread no further buffering will occur.
     */
    private static final class PassThruObserver<T> extends Subscriber<T> {
        private final Observer<? super T> actual;
        // this assumes single threaded synchronous notifications (the Rx contract for a single Observer)
        private final ConcurrentLinkedQueue<Object> buffer;
        private final State<T> state;

        PassThruObserver(Observer<? super T> actual, ConcurrentLinkedQueue<Object> buffer,
                State<T> state) {
            this.actual = actual;
            this.buffer = buffer;
            this.state = state;
        }

        @Override
        public void onCompleted() {
            drainIfNeededAndSwitchToActual();
            actual.onCompleted();
        }

        @Override
        public void onError(Throwable e) {
            drainIfNeededAndSwitchToActual();
            actual.onError(e);
        }

        @Override
        public void onNext(T t) {
            drainIfNeededAndSwitchToActual();
            actual.onNext(t);
        }

        private void drainIfNeededAndSwitchToActual() {
            final NotificationLite<T> nl = NotificationLite.instance();
            Object o;
            while ((o = buffer.poll()) != null) {
                nl.accept(this, o);
            }
            // now we can safely change over to the actual and get rid of the pass-thru
            // but only if not unsubscribed
            state.casObserverRef(this, actual);
        }
    }

    /** Pre-subscription observer: materializes every event into the queue. */
    private static final class BufferedObserver<T> extends Subscriber<T> {
        private final ConcurrentLinkedQueue<Object> buffer = new ConcurrentLinkedQueue<Object>();
        private static final NotificationLite<Object> nl = NotificationLite.instance();

        @Override
        public void onCompleted() {
            buffer.add(nl.completed());
        }

        @Override
        public void onError(Throwable e) {
            buffer.add(nl.error(e));
        }

        @Override
        public void onNext(T t) {
            buffer.add(nl.next(t));
        }
    }
}
| {
"content_hash": "8132e9e8b559735e9b42d48a24448afe",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 144,
"avg_line_length": 37.592039800995025,
"alnum_prop": 0.6275807305452621,
"repo_name": "stealthcode/RxJava",
"id": "39ea385e18f67697b03354594e7bcf2977ac26fb",
"size": "8152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rxjava-core/src/main/java/rx/internal/operators/BufferUntilSubscriber.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE ldml SYSTEM "../../common/dtd/ldml.dtd">
<!-- Copyright © 1991-2013 Unicode, Inc.
CLDR data files are interpreted according to the LDML specification (http://unicode.org/reports/tr35/)
For terms of use, see http://www.unicode.org/copyright.html
-->
<ldml>
<identity>
<version number="$Revision: 11914 $"/>
<language type="lu"/>
</identity>
<metadata>
<casingData>
<casingItem type="calendar_field">titlecase</casingItem>
<casingItem type="currencyName">titlecase</casingItem>
<casingItem type="day_format_except_narrow">titlecase</casingItem>
<casingItem type="day_narrow">titlecase</casingItem>
<casingItem type="day_standalone_except_narrow">titlecase</casingItem>
<casingItem type="era_abbr">lowercase</casingItem>
<casingItem type="era_name">titlecase</casingItem>
<casingItem type="era_narrow">lowercase</casingItem>
<casingItem type="language">titlecase</casingItem>
<casingItem type="month_format_except_narrow">titlecase</casingItem>
<casingItem type="month_narrow">titlecase</casingItem>
<casingItem type="month_standalone_except_narrow">titlecase</casingItem>
<casingItem type="quarter_abbreviated">titlecase</casingItem>
<casingItem type="quarter_format_wide">titlecase</casingItem>
<casingItem type="quarter_standalone_wide">titlecase</casingItem>
<casingItem type="relative">titlecase</casingItem>
<casingItem type="symbol">titlecase</casingItem>
<casingItem type="territory">titlecase</casingItem>
</casingData>
</metadata>
</ldml>
| {
"content_hash": "a93e3d106bdcda8a3d0340104379039e",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 102,
"avg_line_length": 45.94117647058823,
"alnum_prop": 0.7413572343149808,
"repo_name": "NathanBWaters/jb_club",
"id": "08eee5ceb1f9d925a4a5f939849d175724ae0677",
"size": "1563",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "node_modules/cldr/3rdparty/cldr/common/casing/lu.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "1526"
},
{
"name": "CSS",
"bytes": "1507"
},
{
"name": "HTML",
"bytes": "9043"
},
{
"name": "JavaScript",
"bytes": "79757"
}
],
"symlink_target": ""
} |
//
// RWTableViewController.m
// UDo
//
// Created by Soheil Azarpour on 12/21/13.
// Copyright (c) 2013 Ray Wenderlich. All rights reserved.
//
#import "RWTableViewController.h"
#import "RWBasicTableViewCell.h"
#import "UIAlertView+RWBlock.h"
// Class extension: private backing storage for the to-do items.
// NOTE(review): the leading-underscore property name is unconventional
// (Apple reserves the underscore prefix for ivars) — consider renaming.
@interface RWTableViewController ()
@property (strong, nonatomic) NSMutableArray *_objects;
@end
@implementation RWTableViewController

@synthesize _objects;

#pragma mark - Custom accessors

// Lazily creates the backing array of to-do strings.
// NOTE(review): -viewDidLoad overwrites this with its own seed data, so the
// "One"/"Two"/"Three" defaults only apply if this getter runs first.
- (NSMutableArray *)objects {
  if (!_objects) {
    _objects = [NSMutableArray arrayWithObjects:@"One",@"Two",@"Three",nil];
  }
  return _objects;
}

// Every row uses a fixed height of 100 points.
- (CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath
{
  return 100;
}

#pragma mark - View life cycle

- (void)viewDidLoad {
  // Call super first, per UIViewController convention.
  [super viewDidLoad];
  _objects = [NSMutableArray arrayWithObjects:@"Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever sinc",@"Check out the report and finalize it, please", nil];
  // Register the cell class once so -dequeueReusableCellWithIdentifier:forIndexPath:
  // can always vend (and reuse) cells for this identifier.
  [self.tableView registerClass: [RWBasicTableViewCell class] forCellReuseIdentifier:@"Cell Identifier"];
  [self.tableView setSeparatorStyle:UITableViewCellSeparatorStyleNone];
  // A long press on a row starts the drag-to-reorder interaction below.
  UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(longPressGestureRecognized:)];
  [self.tableView addGestureRecognizer:longPress];
}

#pragma mark - UITableView data source and delegate methods

// NOTE(review): the same _objects array backs every section, so each section
// displays identical rows.
- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
  return [_objects count];
}

- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
  // Three fixed sections: Morning, Afternoon, Evening (see titleForHeaderInSection:).
  return 3;
}

// Builds a plain white header view showing the section title in the custom font.
-(UIView *)tableView:(UITableView *)tableView viewForHeaderInSection:(NSInteger)section
{
  UIView *view = [[UIView alloc] initWithFrame:CGRectMake(0, 0, tableView.frame.size.width, 40)];
  view.backgroundColor = [UIColor whiteColor];
  UILabel *label = [[UILabel alloc] initWithFrame:CGRectMake(19, view.frame.origin.y, view.frame.size.width-19, view.frame.size.height)];
  label.textColor = [[UIColor alloc] initWithRed:76/255.0f green:76/255.0f blue:76/255.0f alpha:1.0f];
  [label setFont:[UIFont fontWithName:@"BrandonText-Regular" size:12]];
  NSString *string = [self.tableView.dataSource tableView:tableView titleForHeaderInSection:section];
  [label setText:string];
  [view addSubview:label];
  return view;
}

// Maps section index to its display title; "Anytime" is a defensive fallback.
- (NSString *)tableView:(UITableView *)tableView titleForHeaderInSection:(NSInteger)section
{
  NSString *sectionName;
  switch (section)
  {
    case 0:
      sectionName = NSLocalizedString(@"Morning", @"Morning");
      break;
    case 1:
      sectionName = NSLocalizedString(@"Afternoon", @"Afternoon");
      break;
    case 2:
      sectionName = NSLocalizedString(@"Evening", @"Evening");
      break;
    default:
      sectionName = @"Anytime";
      break;
  }
  return sectionName;
}

- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
  static NSString *kIdentifier = @"Cell Identifier";
  // FIX: the previous code alloc/init'ed a fresh cell on every call and its
  // `if (cell == nil)` check could never fire, defeating cell reuse entirely.
  // The cell class is registered in -viewDidLoad, so dequeueing with
  // forIndexPath: always returns a (possibly recycled) cell.
  RWBasicTableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:kIdentifier forIndexPath:indexPath];
  cell.selectedBackgroundView = [[UIView alloc] init];
  cell.selectedBackgroundView.backgroundColor = [UIColor whiteColor];
  // Update cell content from data source.
  NSString *object = [_objects objectAtIndex:indexPath.row];
  NSLog(@"selected tableview row is %ld",(long)indexPath.row);
  cell.taskName.text = object;
  [cell.taskName sizeToFit];
  return cell;
}

- (BOOL)tableView:(UITableView *)tableView canEditRowAtIndexPath:(NSIndexPath *)indexPath {
  return YES;
}

- (UITableViewCellEditingStyle)tableView:(UITableView *)tableView editingStyleForRowAtIndexPath:(NSIndexPath *)indexPath {
  return UITableViewCellEditingStyleDelete;
}

// Swipe-to-delete support.
// NOTE(review): because all three sections share one array, removing a row in
// one section also changes the row counts of the other sections — confirm the
// table view does not assert on the resulting count mismatch.
- (void)tableView:(UITableView *)tableView commitEditingStyle:(UITableViewCellEditingStyle)editingStyle forRowAtIndexPath:(NSIndexPath *)indexPath {
  [self.objects removeObjectAtIndex:indexPath.row];
  [tableView deleteRowsAtIndexPaths:@[indexPath] withRowAnimation:UITableViewRowAnimationAutomatic];
}

#pragma mark - IBActions

// Prompts for a new to-do item and appends it to section 0.
- (IBAction)addButtonPressed:(id)sender {
  // Display an alert view with a text input.
  UIAlertView *inputAlertView = [[UIAlertView alloc] initWithTitle:@"Add a new to-do item:" message:nil delegate:nil cancelButtonTitle:@"Dismiss" otherButtonTitles:@"Add", nil];
  inputAlertView.alertViewStyle = UIAlertViewStylePlainTextInput;
  __weak RWTableViewController *weakself = self;
  // Add a completion block (using our category to UIAlertView).
  [inputAlertView setCompletionBlock:^(UIAlertView *alertView, NSInteger buttonIndex) {
    // If user pressed 'Add'...
    if (buttonIndex == 1) {
      UITextField *textField = [alertView textFieldAtIndex:0];
      NSString *string = [textField.text capitalizedString];
      [weakself.objects addObject:string];
      NSUInteger row = [weakself.objects count] - 1;
      NSIndexPath *indexPath = [NSIndexPath indexPathForRow:row inSection:0];
      [weakself.tableView insertRowsAtIndexPaths:@[indexPath] withRowAnimation:UITableViewRowAnimationAutomatic];
    }
  }];
  [inputAlertView show];
}

// Drag-to-reorder driven by the long-press recognizer installed in -viewDidLoad.
// A snapshot of the pressed row follows the finger; data source and rows are
// swapped as the snapshot passes over neighbors (within the same section only).
- (IBAction)longPressGestureRecognized:(id)sender {
  UILongPressGestureRecognizer *longPress = (UILongPressGestureRecognizer *)sender;
  UIGestureRecognizerState state = longPress.state;
  CGPoint location = [longPress locationInView:self.tableView];
  NSIndexPath *indexPath = [self.tableView indexPathForRowAtPoint:location];
  static UIView *snapshot = nil; ///< A snapshot of the row user is moving.
  static NSIndexPath *sourceIndexPath = nil; ///< Initial index path, where gesture begins.
  switch (state) {
    case UIGestureRecognizerStateBegan: {
      if (indexPath) {
        sourceIndexPath = indexPath;
        UITableViewCell *cell = [self.tableView cellForRowAtIndexPath:indexPath];
        // Take a snapshot of the selected row using helper method.
        snapshot = [self customSnapshoFromView:cell];
        // Add the snapshot as subview, centered at cell's center...
        __block CGPoint center = cell.center;
        snapshot.center = center;
        snapshot.alpha = 0.0;
        [self.tableView addSubview:snapshot];
        [UIView animateWithDuration:0.25 animations:^{
          // Offset for gesture location.
          center.y = location.y;
          snapshot.center = center;
          snapshot.transform = CGAffineTransformMakeScale(1.05, 1.05);
          snapshot.alpha = 0.98;
          cell.alpha = 0.0;
        } completion:^(BOOL finished) {
          cell.hidden = YES;
        }];
      }
      break;
    }
    case UIGestureRecognizerStateChanged: {
      CGPoint center = snapshot.center;
      center.y = location.y;
      snapshot.center = center;
      // Is destination valid and is it different from source?
      // added that the indexPath section has to be the same as the source index path. Can't reorder between sections.
      if (indexPath && ![indexPath isEqual:sourceIndexPath] && (indexPath.section == sourceIndexPath.section)) {
        // ... update data source.
        [self.objects exchangeObjectAtIndex:indexPath.row withObjectAtIndex:sourceIndexPath.row];
        // ... move the rows.
        [self.tableView moveRowAtIndexPath:sourceIndexPath toIndexPath:indexPath];
        // ... and update source so it is in sync with UI changes.
        sourceIndexPath = indexPath;
      }
      break;
    }
    default: {
      // Gesture ended or was cancelled: settle the snapshot onto the cell,
      // restore the cell, and clean up the static state.
      UITableViewCell *cell = [self.tableView cellForRowAtIndexPath:sourceIndexPath];
      cell.hidden = NO;
      cell.alpha = 0.0;
      [UIView animateWithDuration:0.25 animations:^{
        snapshot.center = cell.center;
        snapshot.transform = CGAffineTransformIdentity;
        snapshot.alpha = 0.0;
        cell.alpha = 1.0;
      } completion:^(BOOL finished) {
        sourceIndexPath = nil;
        [snapshot removeFromSuperview];
        snapshot = nil;
      }];
      break;
    }
  }
}

#pragma mark - Helper methods

/** @brief Returns a customized snapshot (image view with drop shadow) of a given view. */
- (UIView *)customSnapshoFromView:(UIView *)inputView {
  // Make an image from the input view.
  UIGraphicsBeginImageContextWithOptions(inputView.bounds.size, NO, 0);
  [inputView.layer renderInContext:UIGraphicsGetCurrentContext()];
  UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  // Create an image view.
  UIView *snapshot = [[UIImageView alloc] initWithImage:image];
  snapshot.layer.masksToBounds = NO;
  snapshot.layer.cornerRadius = 0.0;
  snapshot.layer.shadowOffset = CGSizeMake(-5.0, 0.0);
  snapshot.layer.shadowRadius = 5.0;
  snapshot.layer.shadowOpacity = 0.4;
  return snapshot;
}

@end
| {
"content_hash": "fbefb4ed6e526d155189998cba271fd3",
"timestamp": "",
"source": "github",
"line_count": 302,
"max_line_length": 246,
"avg_line_length": 33.94039735099338,
"alnum_prop": 0.6914146341463414,
"repo_name": "rtrivedi/RKSwipeBetweenViewControllers-master",
"id": "1510a8f186344ca65e39debab023ae6427dfc3e6",
"size": "10250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RKSwipeBetweenViewControllers/RWTableViewController.m",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "41069"
},
{
"name": "Ruby",
"bytes": "5987"
}
],
"symlink_target": ""
} |
import pickle
from datetime import timedelta
from redis import Redis
from werkzeug.datastructures import CallbackDict
from flask.sessions import SessionInterface, SessionMixin
from util import random_base64
"""
Based on http://flask.pocoo.org/snippets/75/, with the session ID
generation modified to ensure better randomness.
"""
class RedisSession(CallbackDict, SessionMixin):
    """Dict-like session object backed by Redis.

    Any mutation of the dict flips ``modified`` to True (via CallbackDict's
    on_update hook) so the session interface knows whether the session must
    be written back or its cookie deleted.
    """

    def __init__(self, initial=None, sid=None, new=False):
        # CallbackDict invokes on_update whenever the dict is mutated.
        def on_update(self_):
            self_.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.sid = sid  # session ID: Redis key suffix and cookie value
        self.new = new  # True when no stored session data was found
        self.modified = False
class RedisSessionInterface(SessionInterface):
    """Flask session interface storing pickled session dicts in Redis.

    Each session lives under the key ``prefix + sid``; the cookie carries
    only the session ID.
    """

    serializer = pickle
    session_class = RedisSession

    def __init__(self, redis=None, prefix='session:'):
        # Fall back to a default local Redis connection when none is supplied.
        self.redis = redis if redis is not None else Redis()
        self.prefix = prefix

    @staticmethod
    def generate_sid():
        # Cryptographic-quality random session ID (see util.random_base64).
        return random_base64(64)

    @staticmethod
    def get_redis_expiration_time(app, session):
        # Permanent sessions honor the app-configured lifetime; others last a day.
        if session.permanent:
            return app.permanent_session_lifetime
        return timedelta(days=1)

    def open_session(self, app, request):
        sid = request.cookies.get(app.session_cookie_name)
        if not sid:
            # No cookie: start a brand-new session.
            return self.session_class(sid=self.generate_sid(), new=True)
        stored = self.redis.get(self.prefix + sid)
        if stored is None:
            # Cookie present but nothing in Redis (expired or bogus ID).
            return self.session_class(sid=sid, new=True)
        return self.session_class(self.serializer.loads(stored), sid=sid)

    def save_session(self, app, session, response):
        domain = self.get_cookie_domain(app)
        if not session:
            # Empty session: drop the stored copy; clear the cookie only if
            # the session was actually emptied during this request.
            self.redis.delete(self.prefix + session.sid)
            if session.modified:
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain)
            return
        redis_exp = self.get_redis_expiration_time(app, session)
        cookie_exp = self.get_expiration_time(app, session)
        payload = self.serializer.dumps(dict(session))
        # NOTE(review): argument order (name, value, time) matches redis-py
        # < 3.0; redis-py >= 3.0 expects setex(name, time, value) — verify the
        # installed client version.
        self.redis.setex(self.prefix + session.sid, payload,
                         int(redis_exp.total_seconds()))
        response.set_cookie(app.session_cookie_name, session.sid,
                            expires=cookie_exp, httponly=True,
                            domain=domain)
| {
"content_hash": "2df9b9077c1b4a524b359ed51dadaa5f",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 65,
"avg_line_length": 33.901408450704224,
"alnum_prop": 0.6186123805567096,
"repo_name": "icedevml/webcrypto-rsa-login",
"id": "2081584ec05d6d808148ad0e5f5c52c2427e7876",
"size": "2407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "redis_session.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "137"
},
{
"name": "HTML",
"bytes": "3856"
},
{
"name": "JavaScript",
"bytes": "8616"
},
{
"name": "Python",
"bytes": "8448"
}
],
"symlink_target": ""
} |
<?php
/**
* @file
* Contains \Drupal\field\Tests\EntityReference\EntityReferenceAdminTest.
*/
namespace Drupal\field\Tests\EntityReference;
use Drupal\Core\Entity\Entity;
use Drupal\Core\Field\FieldStorageDefinitionInterface;
use Drupal\field_ui\Tests\FieldUiTestTrait;
use Drupal\node\Entity\Node;
use Drupal\simpletest\WebTestBase;
use Drupal\taxonomy\Entity\Vocabulary;
/**
* Tests for the administrative UI.
*
* @group entity_reference
*/
class EntityReferenceAdminTest extends WebTestBase {
  use FieldUiTestTrait;
  /**
   * Modules to install.
   *
   * Enable path module to ensure that the selection handler does not fail for
   * entities with a path field.
   * Enable views_ui module to see the no_view_help text.
   *
   * @var array
   */
  public static $modules = ['node', 'field_ui', 'path', 'taxonomy', 'block', 'views_ui'];
  /**
   * The name of the content type created for testing purposes.
   *
   * @var string
   */
  protected $type;
  /**
   * {@inheritdoc}
   */
  protected function setUp() {
    parent::setUp();
    $this->drupalPlaceBlock('system_breadcrumb_block');
    // Create a content type, with underscores.
    $type_name = strtolower($this->randomMachineName(8)) . '_test';
    $type = $this->drupalCreateContentType(array('name' => $type_name, 'type' => $type_name));
    $this->type = $type->id();
    // Create test user.
    $admin_user = $this->drupalCreateUser(array(
      'access content',
      'administer node fields',
      'administer node display',
      'administer views',
      'create ' . $type_name . ' content',
      'edit own ' . $type_name . ' content',
    ));
    $this->drupalLogin($admin_user);
  }
  /**
   * Tests the Entity Reference Admin UI.
   *
   * Walks the whole field-creation flow (add field, storage settings, field
   * settings), then exercises the default and views-based selection handlers
   * for several target entity types.
   */
  public function testFieldAdminHandler() {
    $bundle_path = 'admin/structure/types/manage/' . $this->type;
    // First step: 'Add new field' on the 'Manage fields' page.
    $this->drupalGet($bundle_path . '/fields/add-field');
    // Check if the commonly referenced entity types appear in the list.
    $this->assertOption('edit-new-storage-type', 'field_ui:entity_reference:node');
    $this->assertOption('edit-new-storage-type', 'field_ui:entity_reference:user');
    $this->drupalPostForm(NULL, array(
      'label' => 'Test label',
      'field_name' => 'test',
      'new_storage_type' => 'entity_reference',
    ), t('Save and continue'));
    // Node should be selected by default.
    $this->assertFieldByName('settings[target_type]', 'node');
    // Check that all entity types can be referenced.
    $this->assertFieldSelectOptions('settings[target_type]', array_keys(\Drupal::entityManager()->getDefinitions()));
    // Second step: 'Field settings' form.
    $this->drupalPostForm(NULL, array(), t('Save field settings'));
    // The base handler should be selected by default.
    $this->assertFieldByName('settings[handler]', 'default:node');
    // The base handler settings should be displayed.
    $entity_type_id = 'node';
    $bundles = entity_get_bundles($entity_type_id);
    foreach ($bundles as $bundle_name => $bundle_info) {
      $this->assertFieldByName('settings[handler_settings][target_bundles][' . $bundle_name . ']');
    }
    // Rewind so key($bundles) below returns the first bundle.
    reset($bundles);
    // Test the sort settings.
    // Option 0: no sort.
    $this->assertFieldByName('settings[handler_settings][sort][field]', '_none');
    $this->assertNoFieldByName('settings[handler_settings][sort][direction]');
    // Option 1: sort by field.
    $this->drupalPostAjaxForm(NULL, array('settings[handler_settings][sort][field]' => 'nid'), 'settings[handler_settings][sort][field]');
    $this->assertFieldByName('settings[handler_settings][sort][direction]', 'ASC');
    // Test that a non-translatable base field is a sort option.
    $this->assertFieldByXPath("//select[@name='settings[handler_settings][sort][field]']/option[@value='nid']");
    // Test that a translatable base field is a sort option.
    $this->assertFieldByXPath("//select[@name='settings[handler_settings][sort][field]']/option[@value='title']");
    // Test that a configurable field is a sort option.
    $this->assertFieldByXPath("//select[@name='settings[handler_settings][sort][field]']/option[@value='body.value']");
    // Set back to no sort.
    $this->drupalPostAjaxForm(NULL, array('settings[handler_settings][sort][field]' => '_none'), 'settings[handler_settings][sort][field]');
    $this->assertNoFieldByName('settings[handler_settings][sort][direction]');
    // Third step: confirm.
    $this->drupalPostForm(NULL, array(
      'required' => '1',
      'settings[handler_settings][target_bundles][' . key($bundles) . ']' => key($bundles),
    ), t('Save settings'));
    // Check that the field appears in the overview form.
    $this->assertFieldByXPath('//table[@id="field-overview"]//tr[@id="field-test"]/td[1]', 'Test label', 'Field was created and appears in the overview page.');
    // Check that the field settings form can be submitted again, even when the
    // field is required.
    // The first 'Edit' link is for the Body field.
    $this->clickLink(t('Edit'), 1);
    $this->drupalPostForm(NULL, array(), t('Save settings'));
    // Switch the target type to 'taxonomy_term' and check that the settings
    // specific to its selection handler are displayed.
    $field_name = 'node.' . $this->type . '.field_test';
    $edit = array(
      'settings[target_type]' => 'taxonomy_term',
    );
    $this->drupalPostForm($bundle_path . '/fields/' . $field_name . '/storage', $edit, t('Save field settings'));
    $this->drupalGet($bundle_path . '/fields/' . $field_name);
    $this->assertFieldByName('settings[handler_settings][auto_create]');
    // Switch the target type to 'user' and check that the settings specific to
    // its selection handler are displayed.
    $field_name = 'node.' . $this->type . '.field_test';
    $edit = array(
      'settings[target_type]' => 'user',
    );
    $this->drupalPostForm($bundle_path . '/fields/' . $field_name . '/storage', $edit, t('Save field settings'));
    $this->drupalGet($bundle_path . '/fields/' . $field_name);
    $this->assertFieldByName('settings[handler_settings][filter][type]', '_none');
    // Switch the target type to 'node'.
    $field_name = 'node.' . $this->type . '.field_test';
    $edit = array(
      'settings[target_type]' => 'node',
    );
    $this->drupalPostForm($bundle_path . '/fields/' . $field_name . '/storage', $edit, t('Save field settings'));
    // Try to select the views handler.
    $edit = array(
      'settings[handler]' => 'views',
    );
    $this->drupalPostAjaxForm($bundle_path . '/fields/' . $field_name, $edit, 'settings[handler]');
    $this->assertRaw(t('No eligible views were found. <a href=":create">Create a view</a> with an <em>Entity Reference</em> display, or add such a display to an <a href=":existing">existing view</a>.', array(
      ':create' => \Drupal::url('views_ui.add'),
      ':existing' => \Drupal::url('entity.view.collection'),
    )));
    $this->drupalPostForm(NULL, $edit, t('Save settings'));
    // If no eligible view is available we should see a message.
    $this->assertText('The views entity selection mode requires a view.');
    // Enable the entity_reference_test module which creates an eligible view.
    $this->container->get('module_installer')->install(array('entity_reference_test'));
    $this->resetAll();
    $this->drupalGet($bundle_path . '/fields/' . $field_name);
    $this->drupalPostAjaxForm($bundle_path . '/fields/' . $field_name, $edit, 'settings[handler]');
    $edit = array(
      'settings[handler_settings][view][view_and_display]' => 'test_entity_reference:entity_reference_1',
    );
    $this->drupalPostForm(NULL, $edit, t('Save settings'));
    $this->assertResponse(200);
    // Switch the target type to 'entity_test'.
    $edit = array(
      'settings[target_type]' => 'entity_test',
    );
    $this->drupalPostForm($bundle_path . '/fields/' . $field_name . '/storage', $edit, t('Save field settings'));
    $this->drupalGet($bundle_path . '/fields/' . $field_name);
    $edit = array(
      'settings[handler]' => 'views',
    );
    $this->drupalPostAjaxForm($bundle_path . '/fields/' . $field_name, $edit, 'settings[handler]');
    $edit = array(
      'required' => FALSE,
      'settings[handler_settings][view][view_and_display]' => 'test_entity_reference_entity_test:entity_reference_1',
    );
    $this->drupalPostForm(NULL, $edit, t('Save settings'));
    $this->assertResponse(200);
    // Create a new view and display it as a entity reference.
    $edit = array(
      'id' => 'node_test_view',
      'label' => 'Node Test View',
      'show[wizard_key]' => 'node',
      'page[create]' => 1,
      'page[title]' => 'Test Node View',
      'page[path]' => 'test/node/view',
      'page[style][style_plugin]' => 'default',
      'page[style][row_plugin]' => 'fields',
    );
    $this->drupalPostForm('admin/structure/views/add', $edit, t('Save and edit'));
    $this->drupalPostForm(NULL, array(), t('Duplicate as Entity Reference'));
    $this->clickLink(t('Settings'));
    $edit = array(
      'style_options[search_fields][title]' => 'title',
    );
    $this->drupalPostForm(NULL, $edit, t('Apply'));
    $this->drupalPostForm('admin/structure/views/view/node_test_view/edit/entity_reference_1', array(), t('Save'));
    $this->clickLink(t('Settings'));
    // Create a test entity reference field.
    $field_name = 'test_entity_ref_field';
    $edit = array(
      'new_storage_type' => 'field_ui:entity_reference:node',
      'label' => 'Test Entity Reference Field',
      'field_name' => $field_name,
    );
    $this->drupalPostForm($bundle_path . '/fields/add-field', $edit, t('Save and continue'));
    // Set to unlimited.
    $edit = array(
      'cardinality' => FieldStorageDefinitionInterface::CARDINALITY_UNLIMITED,
    );
    $this->drupalPostForm(NULL, $edit, t('Save field settings'));
    // Add the view to the test field.
    $edit = array(
      'settings[handler]' => 'views',
    );
    $this->drupalPostAjaxForm(NULL, $edit, 'settings[handler]');
    $edit = array(
      'required' => FALSE,
      'settings[handler_settings][view][view_and_display]' => 'node_test_view:entity_reference_1',
    );
    $this->drupalPostForm(NULL, $edit, t('Save settings'));
    // Create nodes.
    // Both nodes get the same title on purpose, to exercise the ambiguous
    // autocomplete match below.
    $node1 = Node::create([
      'type' => $this->type,
      'title' => 'Foo Node',
    ]);
    $node1->save();
    $node2 = Node::create([
      'type' => $this->type,
      'title' => 'Foo Node',
    ]);
    $node2->save();
    // Try to add a new node and fill the entity reference field.
    $this->drupalGet('node/add/' . $this->type);
    $result = $this->xpath('//input[@name="field_test_entity_ref_field[0][target_id]" and contains(@data-autocomplete-path, "/entity_reference_autocomplete/node/views/")]');
    $target_url = $this->getAbsoluteUrl($result[0]['data-autocomplete-path']);
    $this->drupalGet($target_url, array('query' => array('q' => 'Foo')));
    $this->assertRaw($node1->getTitle() . ' (' . $node1->id() . ')');
    $this->assertRaw($node2->getTitle() . ' (' . $node2->id() . ')');
    // Try to add a new node, fill the entity reference field and submit the
    // form.
    $this->drupalPostForm('node/add/' . $this->type, [], t('Add another item'));
    $edit = array(
      'title[0][value]' => 'Example',
      'field_test_entity_ref_field[0][target_id]' => 'Foo Node (' . $node1->id() . ')',
      'field_test_entity_ref_field[1][target_id]' => 'Foo Node (' . $node2->id() . ')',
    );
    $this->drupalPostForm(NULL, $edit, t('Save'));
    $this->assertResponse(200);
    $edit = array(
      'title[0][value]' => 'Example',
      'field_test_entity_ref_field[0][target_id]' => 'Test'
    );
    $this->drupalPostForm('node/add/' . $this->type, $edit, t('Save'));
    // Assert that entity reference autocomplete field is validated.
    $this->assertText(t('There are no entities matching "@entity"', ['@entity' => 'Test']));
    $edit = array(
      'title[0][value]' => 'Test',
      'field_test_entity_ref_field[0][target_id]' => $node1->getTitle()
    );
    $this->drupalPostForm('node/add/' . $this->type, $edit, t('Save'));
    // Assert the results multiple times to avoid sorting problem of nodes with
    // the same title.
    $this->assertText(t('Multiple entities match this reference;'));
    $this->assertText(t("@node1", ['@node1' => $node1->getTitle() . ' (' . $node1->id() . ')']));
    $this->assertText(t("@node2", ['@node2' => $node2->getTitle() . ' (' . $node2->id() . ')']));
    $edit = array(
      'title[0][value]' => 'Test',
      'field_test_entity_ref_field[0][target_id]' => $node1->getTitle() . '(' . $node1->id() . ')'
    );
    $this->drupalPostForm('node/add/' . $this->type, $edit, t('Save'));
    $this->assertLink($node1->getTitle());
    // Tests adding default values to autocomplete widgets.
    Vocabulary::create(array('vid' => 'tags', 'name' => 'tags'))->save();
    $taxonomy_term_field_name = $this->createEntityReferenceField('taxonomy_term', 'tags');
    $field_path = 'node.' . $this->type . '.field_' . $taxonomy_term_field_name;
    $this->drupalGet($bundle_path . '/fields/' . $field_path . '/storage');
    $edit = [
      'cardinality' => -1,
    ];
    $this->drupalPostForm(NULL, $edit, t('Save field settings'));
    $this->drupalGet($bundle_path . '/fields/' . $field_path);
    $term_name = $this->randomString();
    $edit = [
      // This must be set before new entities will be auto-created.
      'settings[handler_settings][auto_create]' => 1,
    ];
    $this->drupalPostForm(NULL, $edit, t('Save settings'));
    $this->drupalGet($bundle_path . '/fields/' . $field_path);
    $edit = [
      // A term that doesn't yet exist.
      'default_value_input[field_' . $taxonomy_term_field_name . '][0][target_id]' => $term_name,
    ];
    $this->drupalPostForm(NULL, $edit, t('Save settings'));
    // The term should now exist.
    // NOTE(review): [1] presumably indexes by term ID, the first created
    // term getting tid 1 — confirm against
    // taxonomy_term_load_multiple_by_name().
    $term = taxonomy_term_load_multiple_by_name($term_name, 'tags')[1];
    // NOTE(review): $term is a single entity here, so count($term) does not
    // count loaded results — confirm the intended assertion.
    $this->assertIdentical(1, count($term), 'Taxonomy term was auto created when set as field default.');
  }
  /**
   * Tests the formatters for the Entity References.
   */
  public function testAvailableFormatters() {
    // Create a new vocabulary.
    Vocabulary::create(array('vid' => 'tags', 'name' => 'tags'))->save();
    // Create entity reference field with taxonomy term as a target.
    $taxonomy_term_field_name = $this->createEntityReferenceField('taxonomy_term', 'tags');
    // Create entity reference field with user as a target.
    $user_field_name = $this->createEntityReferenceField('user');
    // Create entity reference field with node as a target.
    $node_field_name = $this->createEntityReferenceField('node', $this->type);
    // Create entity reference field with date format as a target.
    $date_format_field_name = $this->createEntityReferenceField('date_format');
    // Display all newly created Entity Reference configuration.
    $this->drupalGet('admin/structure/types/manage/' . $this->type . '/display');
    // Check for Taxonomy Term select box values.
    // Test if Taxonomy Term Entity Reference Field has the correct formatters.
    $this->assertFieldSelectOptions('fields[field_' . $taxonomy_term_field_name . '][type]', array(
      'entity_reference_label',
      'entity_reference_entity_id',
      'entity_reference_rss_category',
      'entity_reference_entity_view',
      'hidden',
    ));
    // Test if User Reference Field has the correct formatters.
    // Author should be available for this field.
    // RSS Category should not be available for this field.
    $this->assertFieldSelectOptions('fields[field_' . $user_field_name . '][type]', array(
      'author',
      'entity_reference_entity_id',
      'entity_reference_entity_view',
      'entity_reference_label',
      'hidden',
    ));
    // Test if Node Entity Reference Field has the correct formatters.
    // RSS Category should not be available for this field.
    $this->assertFieldSelectOptions('fields[field_' . $node_field_name . '][type]', array(
      'entity_reference_label',
      'entity_reference_entity_id',
      'entity_reference_entity_view',
      'hidden',
    ));
    // Test if Date Format Reference Field has the correct formatters.
    // RSS Category & Entity View should not be available for this field.
    // This could be any field without a ViewBuilder.
    $this->assertFieldSelectOptions('fields[field_' . $date_format_field_name . '][type]', array(
      'entity_reference_label',
      'entity_reference_entity_id',
      'hidden',
    ));
  }
  /**
   * Creates a new Entity Reference fields with a given target type.
   *
   * @param $target_type
   *   The name of the target type
   * @param $bundle
   *   Name of the bundle
   *   Default = NULL
   * @return string
   *   Returns the generated field name
   */
  public function createEntityReferenceField($target_type, $bundle = NULL) {
    // Generates a bundle path for the newly created content type.
    $bundle_path = 'admin/structure/types/manage/' . $this->type;
    // Generate a random field name, must be only lowercase characters.
    $field_name = strtolower($this->randomMachineName());
    $storage_edit = $field_edit = array();
    $storage_edit['settings[target_type]'] = $target_type;
    if ($bundle) {
      $field_edit['settings[handler_settings][target_bundles][' . $bundle . ']'] = TRUE;
    }
    $this->fieldUIAddNewField($bundle_path, $field_name, NULL, 'entity_reference', $storage_edit, $field_edit);
    // Returns the generated field name.
    return $field_name;
  }
  /**
   * Checks if a select element contains the specified options.
   *
   * @param string $name
   *   The field name.
   * @param array $expected_options
   *   An array of expected options.
   *
   * @return bool
   *   TRUE if the assertion succeeded, FALSE otherwise.
   */
  protected function assertFieldSelectOptions($name, array $expected_options) {
    $xpath = $this->buildXPathQuery('//select[@name=:name]', array(':name' => $name));
    $fields = $this->xpath($xpath);
    if ($fields) {
      $field = $fields[0];
      $options = $this->getAllOptionsList($field);
      // Sort both sides so the comparison is order-independent.
      sort($options);
      sort($expected_options);
      return $this->assertIdentical($options, $expected_options);
    }
    else {
      return $this->fail('Unable to find field ' . $name);
    }
  }
  /**
   * Extracts all options from a select element.
   *
   * @param \SimpleXMLElement $element
   *   The select element field information.
   *
   * @return array
   *   An array of option values as strings.
   */
  protected function getAllOptionsList(\SimpleXMLElement $element) {
    $options = array();
    // Add all options items.
    foreach ($element->option as $option) {
      $options[] = (string) $option['value'];
    }
    // Loops through all the option groups, collecting their options
    // recursively.
    foreach ($element->optgroup as $optgroup) {
      $options = array_merge($this->getAllOptionsList($optgroup), $options);
    }
    return $options;
  }
}
| {
"content_hash": "120ac196d5d8470ebf073e86f8954f31",
"timestamp": "",
"source": "github",
"line_count": 488,
"max_line_length": 208,
"avg_line_length": 39.18647540983606,
"alnum_prop": 0.6309679443601945,
"repo_name": "edwardchan/d8-drupalvm",
"id": "6ed4843ac3d99ab9f377037a6660341dcb4dffbc",
"size": "19123",
"binary": false,
"copies": "58",
"ref": "refs/heads/master",
"path": "drupal/core/modules/field/src/Tests/EntityReference/EntityReferenceAdminTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "50616"
},
{
"name": "CSS",
"bytes": "1250515"
},
{
"name": "HTML",
"bytes": "646836"
},
{
"name": "JavaScript",
"bytes": "1018469"
},
{
"name": "PHP",
"bytes": "31387248"
},
{
"name": "Shell",
"bytes": "675"
},
{
"name": "SourcePawn",
"bytes": "31943"
}
],
"symlink_target": ""
} |
<?php
namespace Laminas\Cache\Storage\Plugin;
use Laminas\Cache\Storage\Event;
use Laminas\EventManager\EventManagerInterface;
class IgnoreUserAbort extends AbstractPlugin
{
    /**
     * The storage instance that switched ignore_user_abort on (if any).
     *
     * @var null|\Laminas\Cache\Storage\StorageInterface
     */
    protected $activatedTarget = null;

    /**
     * {@inheritDoc}
     *
     * Attaches onBefore to the ".pre" event and onAfter to the ".post" and
     * ".exception" events of every mutating storage operation.
     */
    public function attach(EventManagerInterface $events, $priority = 1)
    {
        $onBefore = [$this, 'onBefore'];
        $onAfter  = [$this, 'onAfter'];

        // set / add / replace / check-and-set item(s),
        // increment / decrement item(s)
        $operations = [
            'setItem',
            'setItems',
            'addItem',
            'addItems',
            'replaceItem',
            'replaceItems',
            'checkAndSetItem',
            'incrementItem',
            'incrementItems',
            'decrementItem',
            'decrementItems',
        ];

        foreach ($operations as $operation) {
            $this->listeners[] = $events->attach($operation . '.pre', $onBefore, $priority);
            $this->listeners[] = $events->attach($operation . '.post', $onAfter, $priority);
            $this->listeners[] = $events->attach($operation . '.exception', $onAfter, $priority);
        }
    }

    /**
     * Activate ignore_user_abort if not already done
     * and save the target who activated it.
     *
     * @param Event $event
     * @return void
     */
    public function onBefore(Event $event)
    {
        if ($this->activatedTarget !== null) {
            // Some earlier operation already activated it; nothing to do.
            return;
        }
        // ignore_user_abort() returns the previous setting, so a falsy
        // result means we are the ones who just enabled it.
        if (! ignore_user_abort(true)) {
            $this->activatedTarget = $event->getStorage();
        }
    }

    /**
     * Reset ignore_user_abort if it's activated and if it's the same target
     * who activated it.
     *
     * If exit_on_abort is enabled and the connection has been aborted
     * exit the script.
     *
     * @param Event $event
     * @return void
     */
    public function onAfter(Event $event)
    {
        if ($this->activatedTarget !== $event->getStorage()) {
            return;
        }
        // Exit if the connection was aborted and the option requests it.
        if ($this->getOptions()->getExitOnAbort() && connection_aborted()) {
            exit;
        }
        // Restore normal abort behaviour and forget the activator.
        ignore_user_abort(false);
        $this->activatedTarget = null;
    }
}
| {
"content_hash": "c2c640a418bfe73e91a99851a3aa7a55",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 97,
"avg_line_length": 42.71818181818182,
"alnum_prop": 0.6199191317301553,
"repo_name": "deforay/odkdash",
"id": "0a129f6b14af892a30d902b26756f481455d99b7",
"size": "4699",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/laminas/laminas-cache/src/Storage/Plugin/IgnoreUserAbort.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2671717"
},
{
"name": "HTML",
"bytes": "974515"
},
{
"name": "JavaScript",
"bytes": "2257422"
},
{
"name": "Makefile",
"bytes": "285"
},
{
"name": "PHP",
"bytes": "644826"
},
{
"name": "Python",
"bytes": "16281"
},
{
"name": "Shell",
"bytes": "1061"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!--
  Maven Assembly descriptor for the "release" distribution: builds a single
  zip containing the project artifact plus its compile-scope transitive
  dependencies, minus the artifacts excluded below.
-->
<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3 http://maven.apache.org/xsd/assembly-1.1.3.xsd">
  <id>release</id>
  <formats>
    <format>zip</format>
  </formats>
  <dependencySets>
    <dependencySet>
      <!-- NOTE(review): these group IDs are deliberately kept out of the
           release zip; presumably optional/provided at runtime - confirm
           before adding or removing entries. -->
      <excludes>
        <exclude>com.github.waffle:*</exclude>
        <exclude>org.osgi:*</exclude>
        <exclude>com.ongres.scram:*</exclude>
        <exclude>com.ongres.stringprep:*</exclude>
        <exclude>net.java.dev.jna:*</exclude>
        <exclude>org.slf4j:*</exclude>
        <exclude>com.github.ben-manes.caffeine:*</exclude>
        <exclude>org.junit.jupiter:*</exclude>
        <exclude>org.junit.platform:*</exclude>
        <exclude>org.opentest4j:*</exclude>
        <exclude>org.apiguardian:*</exclude>
      </excludes>
      <unpack>false</unpack>
      <scope>compile</scope>
      <useTransitiveDependencies>true</useTransitiveDependencies>
      <useProjectArtifact>true</useProjectArtifact>
      <outputDirectory>/</outputDirectory>
    </dependencySet>
  </dependencySets>
</assembly>
| {
"content_hash": "9401c0612a2c5462e0c5cd5291d8fb91",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 147,
"avg_line_length": 44.25,
"alnum_prop": 0.58545197740113,
"repo_name": "aws/amazon-redshift-jdbc-driver",
"id": "59be0541481547cb04f8379aabf1f9d0f769eccd",
"size": "1416",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/assembly/assembly.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "2554929"
},
{
"name": "Shell",
"bytes": "2240"
}
],
"symlink_target": ""
} |
#include <Mragpp/Box2D/Dynamics/Contacts/b2ChainAndPolygonContact.h>
#include <Mragpp/Box2D/Common/b2BlockAllocator.h>
#include <Mragpp/Box2D/Dynamics/b2Fixture.h>
#include <Mragpp/Box2D/Collision/Shapes/b2ChainShape.h>
#include <Mragpp/Box2D/Collision/Shapes/b2EdgeShape.h>
#include <new>
// Allocates storage from the block allocator and constructs the contact
// in place via placement new.
b2Contact* b2ChainAndPolygonContact::Create(b2Fixture* fixtureA, int32 indexA, b2Fixture* fixtureB, int32 indexB, b2BlockAllocator* allocator)
{
	void* storage = allocator->Allocate(sizeof(b2ChainAndPolygonContact));
	return new (storage) b2ChainAndPolygonContact(fixtureA, indexA, fixtureB, indexB);
}
// Runs the destructor explicitly, then returns the raw storage to the
// block allocator (mirror image of Create()).
void b2ChainAndPolygonContact::Destroy(b2Contact* contact, b2BlockAllocator* allocator)
{
	static_cast<b2ChainAndPolygonContact*>(contact)->~b2ChainAndPolygonContact();
	allocator->Free(contact, sizeof(b2ChainAndPolygonContact));
}
// Precondition (asserted below): fixture A carries the chain shape and
// fixture B the polygon shape.
b2ChainAndPolygonContact::b2ChainAndPolygonContact(b2Fixture* fixtureA, int32 indexA, b2Fixture* fixtureB, int32 indexB)
: b2Contact(fixtureA, indexA, fixtureB, indexB)
{
	b2Assert(m_fixtureA->GetType() == b2Shape::e_chain);
	b2Assert(m_fixtureB->GetType() == b2Shape::e_polygon);
}
// Extracts the child edge (index m_indexA) from the chain shape and
// collides it against the polygon fixture, writing into `manifold`.
void b2ChainAndPolygonContact::Evaluate(b2Manifold* manifold, const b2Transform& xfA, const b2Transform& xfB)
{
	b2ChainShape* chainShape = static_cast<b2ChainShape*>(m_fixtureA->GetShape());
	b2EdgeShape childEdge;
	chainShape->GetChildEdge(&childEdge, m_indexA);
	b2PolygonShape* polygon = static_cast<b2PolygonShape*>(m_fixtureB->GetShape());
	b2CollideEdgeAndPolygon(manifold, &childEdge, xfA, polygon, xfB);
}
| {
"content_hash": "829f18dbefefa885f58342daf4d5002a",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 142,
"avg_line_length": 38.729729729729726,
"alnum_prop": 0.7857641311933008,
"repo_name": "AngeloG/MragPP",
"id": "13dd386c8937059064af3e125202126fdf2e7614",
"size": "2329",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Mragpp/Box2D/Dynamics/Contacts/b2ChainAndPolygonContact.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "313485"
},
{
"name": "C++",
"bytes": "731234"
},
{
"name": "CMake",
"bytes": "18457"
},
{
"name": "GLSL",
"bytes": "500"
}
],
"symlink_target": ""
} |
Wrangler
=======
Wrangler is a Lua object filtration module written with the LÖVE framework in mind. This module will take a list of Lua objects and sort through them, removing any items that do not match the given criteria.
## Filters ##
Wrangler sorts items using functions called filters. A filter receives, in order, the Wrangler object, the object being processed, the name of the filter, and the value to compare the object with. If the object passes the filter, the filter should return **true**. Otherwise, it should return **false**.
```Lua
function filter(self, object, name, value)
return object.foo == value
end
```
By default, a Wrangler object has two filters: *not* and *or*, both of which take a separate list of criteria. Wrangler objects do not have, nor do they need, an *and* filter as criteria are implicitly exclusive.
## Usage ##
To use Wrangler, require the module into your project and create a new Wrangler object.
```Lua
local Wrangler = require("Wrangler")()
```
Once the new object is created, you can add filters to it and use it to process a list of items.
```Lua
local Wrangler = require("Wrangler")()
Wrangler:addFilter("foo", function(self, object, name, value)
return object.foo == value
end)
local t = {
{
foo = "bar",
a = "a",
    [2] = "two"
},
{
foo = "foo",
a = "b",
    [2] = "four"
}
}
Wrangler:filter(t, {
foo = "bar"
})
print(t[1] and t[1].foo) -- bar
print(t[2] and t[2].foo) -- nil (t[2] was filtered out)
```
Note that Wrangler:filter works directly on the table you give to it, so if the table you need it to filter should not change, it is recommended that you unpack or clone it into another table to be processed instead.
For convenience, Wrangler:filter returns the table you passed to it.
| {
"content_hash": "3776df525fbd645274a66f519d18a0a0",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 303,
"avg_line_length": 33.41509433962264,
"alnum_prop": 0.7058159232072275,
"repo_name": "Apprehentice/wrangler",
"id": "8f0a1101f7b47a3b5270069a7d5e00b0b75b5c3f",
"size": "1772",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "readme.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "2891"
}
],
"symlink_target": ""
} |
const tinyColor = require('tinycolor2');
/**
 * Calculate darker shade of color
 *
 * @param {string} color - hex value
 * @param {number} percent
 * @returns {string} hex representation of the darkened color
 */
function darken(color, percent) {
    const darkened = tinyColor(color).darken(percent);
    return darkened.toHexString();
}
/**
* Calculate lighter shade of provided color
*
* @param {string} color - hex value
* @param {number} percent
* @returns {string}
*/
/**
 * Return a lighter shade of the given color.
 *
 * @param {string} color - hex value
 * @param {number} percent - amount to lighten by
 * @returns {string} hex string of the lightened color
 */
function lighten(color, percent) {
	const shaded = tinyColor(color).lighten(percent);
	return shaded.toHexString();
}
// Public API: color shade helpers built on tinycolor2.
module.exports = {
	darken: darken,
	lighten: lighten
}; | {
"content_hash": "03a3da5377836708bc311397b6265d8d",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 56,
"avg_line_length": 19.928571428571427,
"alnum_prop": 0.6899641577060932,
"repo_name": "nathanhood/postcss-js-mixins",
"id": "37fe0e3a8a2682919340813380b423fbecc7522d",
"size": "558",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/colorHelpers.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "27877"
}
],
"symlink_target": ""
} |
<html><head><META http-equiv="content-type" content="text/html;charset=iso-8859-9"/><meta name="viewport" content="width=device-width, initial-scale=1"><title>ac</title> <link rel="stylesheet" href="css/jquery.mobile-1.2.0.css" /><style type="text/css">.content-primary {text-align: left;}#header h3 {text-align: left;}</style><script src="js/jquery.js"></script><script src="js/jquery.mobile-1.2.0.js"></script></head><body><div id="page1" data-role="page"><div id="header" data-position="fixed" data-role="header" data-theme="e"><h3><strong>EBÜ'L-HASAN-I ÞAZÝLÝ
</strong></h3><a class="ui-btn-right" href="../index.html" data-role="button" data-icon="home">Geri</a></div><div data-role="content" align = "center"><div class="content-primary">On ikinci yüzyýlda Kuzey Afrika'da yetiþen büyük velilerden. Þaziliyye adý verilen tasavvuf
yolunun kurucusudur. Ýsmi, Ali bin Abdullah bin Abdülcebbar, künyesi, Ebü'l-Hasan, lakabý
Nureddin'dir. Peygamber efendimizin sallallahü aleyhi ve sellem torunu hazret-i Hasan'ýn
soyundan olup þeriftir. 1196 (H.592) senesinde Tunus'un Þazile kasabasýnda doðduðu için
Þazili nisbesiyle meþhur olmuþtur. 1256 (H.654) senesinde hac yolculuðu sýrasýnda
Hamisre'de vefat etti. Kabri, Hamisre mevkiindeki Ayzab sahrasýndadýr.
Küçük yaþtan itibaren doðduðu Þazile kasabasýnda ilim öðrenmeye baþlayan Ebü'l-Hasan-ý
Þazili, önceleri kimya ilminde uzun çalýþmalar ve araþtýrmalarda bulundu. Bu ilimde iyi
yetiþmesi için cenab-ý Hakk'a yalvararak dua ediyordu. Bu esnada, aldýðý manevi bir iþaretle,
tasavvuf yoluna yöneldi. Din ilimlerinin hepsinde mütehassýs ve derin alim oldu. Hepsinin
inceliklerine ve sýrlarýna kavuþtu. Tefsir, hadis, fýkýh, usul, nahiv, sarf, lügat ilimleri yanýnda,
zamanýn fen ilimlerinde de yüksek alim oldu. Zamanýndaki alimler ve diðer insanlar onun
ilimdeki bu yüksek derecesi karþýsýnda üstünlüðünü kabul ettiler.
Zahiri ilimlerde bu derece yüksek olan Ebü'l-Hasan-ý
Þazili hazretleri, tasavvufa karþý alaka,
ilgi duydu. Birçok velinin sohbetinde bulunup, onlardan istifade etmeye çalýþtý. Bu sebeple
pek çok seyahat yaptý. Bir defasýnda Irak'a giderek buradaki alimlerden Ebü'l-Feth Vasýti'nin
sohbetlerinde bulundu. O sýralarda zamanýn en büyük velisini arýyordu. Bir gün, Ebü'l-Feth
Vasýti hazretleri ona dönerek; "Sen onu Irak'ta arýyorsun. Halbuki aradýðýn kimse, senin
memleketindedir. Oraya dön, orada bulacaksýn." buyurunca, geri memleketine döndü.
Büyük velilerden olan Þerif Ebu Muhammed Abdüsselam Ýbn-i Meþiþ-i Haseni hazretlerinin,
aradýðý zat olduðunu anladý. Ýbn-i Meþiþ hazretleri, Rabat (Ribate)' deki bir daðda maðarada
yaþamaktaydý. Ebü'l-Hasan-ý
Þazili, onun huzuruna çýkmak için, dað eteðinde bulunan
çeþmeden gusl abdesti aldý. Kendindeki bütün meziyetleri ve üstünlükleri unutarak, yani tam
bir boþ kalb ve ihtiyaç ile huzurlarýna doðru yürüdü. Ýbn-i Meþiþ hazretleri de maðaradan
çýkmýþ, ayný
þekilde ona doðru yürüyordu. Karþýlaþtýklarýnda hocasý selam verip, Resulullah
efendimize kadar uzanan nesebini tek tek saydýktan sonra ona: "Ya Ali, bütün ilim ve
amelinizden soyunarak tam bir ihtiyaç ile buraya çýktýnýz ve bizdeki dünya ve ahiret servet ve
zenginliðini aldýnýz." buyurdu. Ebü'l-Hasan-ý
Þazili diyor ki: "Onun bu hitabýndan sonra,
bende fevkalade bir korku hasýl oldu. Hak teala kalb gözümü açýncaya kadar mübarek
huzurlarýnda oturdum. Sohbetlerine devam ettim." Ebü'l-Hasan-ý
Þazili, hocasýnýn yüksek
derecesini bildirirken þöyle buyurdu: "Bir gün hocamýn huzurunda oturuyordum. Kendi
kendime; "Acaba hocam Ýsm-i azamý biliyor mu?" dedim. Bu düþünce ile meþgul iken dýþ
kapýda bulunan oðullarý, bana bakýp; "Ey Ebü'l-Hasan-ý
Þazili, þeref ve itibar, Ýsm-i azamý
bilmekle deðil, belki Ýsm-i azama mazhar olmakladýr." dedi.
Kendisi anlattý ki: "Bir arkadaþýmla bir maðarada bulunuyor ve Allahü tealanýn muhabbetiyle
yanmayý ve O'na kavuþmaðý istiyorduk. Yarýn kalbimiz açýlýr, velilik makamlarýna kavuþuruz
derdik, yarýn olunca da, yine yarýn açýlýr derdik. Yarýnlar gelip geçiyor ve bir türlü
bitmiyordu. Bir gün birden heybetli bir zat yanýmýza girdi. Ona; "Kimsin?" dedik.
Abdülmelik'im, yani Melik olan Rabbimizin kuluyum dedi. Velilerden olduðunu anladýk.
"Nasýlsýnýz?" dedik. "Yarýn olmazsa, öbür yarýn kalbim açýlýr diyenin hali nasýl olur? Allahü
tealaya, sýrf Allah için ibadet etmedikçe, vilayet ve kurtuluþ yoktur." dedi. Bu söz üzerine
gafletten uyandýk. Tövbe ve istigfar ettik. Bunun üzerine kalblerimiz Allahü tealanýn
muhabbetiyle doldu."
Ebü'l-Hasan-ý
Þazili'nin hocasýna olan teslimiyeti tam ve mükemmel bir hale gelince,
karþýlaþacaðý birçok sýkýntýlarý, hocasý kendisine haber verdi. Þöyle vasiyet etti: "Hak tealayý
bir an unutup gaflette olma. Dilini halkýn diline ve kalbini halkýn kalbine benzetmekten sakýn,
bütün uzuvlarýn ile Ýslamiyete uy. Ýslama uygun olmýyan þeylerden sakýn. Farzlarý yerine
getirmeye devam et. Ýþte o vakit Allahü tealanýn veliliði sende tamam olur. Allahü tealanýn
haklarýný yerine getirmekten baþka hiçbir þeyi halka hatýrlatma. Ýþte o zaman vera ve takvaya
yani haram ve þüphelilerden kaçmaya tam uymuþ olursun.
Ebü'l-Hasan-ý
Þazili hazretleri Þazile kasabasýnda yerleþtikten sonra, gerçekten birçok mihnet
ve sýkýntýlara maruz kaldý. Hocalarýnýn haber verdiði sýkýntýlar açýkça meydana geldi. Sonra
Ýskenderiyye'ye yerleþti. Doðudan ve batýdan binlerce alim ve hak aþýðý ziyaret ve
sohbetlerine akýn etti. Mesela devrin büyük alimlerinden Ýzzeddin bin Abdüsselam.
Takýyyüddin bin Ýbn-i Dakik-ül-Iyd, Abdülazim Münziri, Ýbn-üs-Salah, Ýbn-ül-Hacib,
Celaleddin bin Usfur, Nebihüddin ibni Avf, Muhyiddin bin Süraka ve Muhyiddin-i Arabi'nintalebesi el-Alem Yasin bunlar arasýndaydý. Ayrýca Kadý'l-kudat Bedreddin ibni Cema'a da
sohbetlerine kavuþmakla iftihar ederlerdi. Ebü'l-Hasan-ý
Þazili hazretleri, Ebü'l-Abbas-ý
Mürsi gibi evliyanýn büyüklerinden olan birini yetiþtirmiþtir.
Ýbn-i Hacib, Ýbn-i Abdüsselam Ýzzeddin, Ýbn-i Dakik-ül-Ýyd, Abdülazim Münziri, Ýbn-i Salih
ve Ýbn-i Usfur gibi büyük alimler, Ebü'l-Hasan-ý
Þazili'nin meclisinde bulunmak arzusuyla,
Kahire'deki Kemaliye Medresesinde, muayyen vakitlerde hazýr bulunarak Þifa ve Ýbn-i
Atiyye kitaplarýný okurlardý. Dersten çýktýktan sonra da onunla beraber yaya yürürlerdi.
Ebü'l-Hasan-ý
Þazili; "Ýzzeddin bin Abdüsselam'ýn fýkýh meclisi, Abdülazim Münziri'nin
hadis meclisi, senin tasavvuf meclisinden daha kýymetli bir meclis yoktur diye bana müjde
verildi." buyurdu.
Hýzýr aleyhisselam bir gün kendisine; "Ey Ebü'l-Hasan! Allahü teala, seni kendisine dost
edinmiþtir. Kalsan da, gitsen de, O seninle beraberdir." dedi.
Bir gün Ebü'l-Hasan-ý
Þazili, zühdden, dünyaya raðbet etmemekten bahsediyordu. Fakat
üzerinde yeni ve güzel bir elbise vardý. O mecliste üzerinde eski elbiseler olan bir fakir;
kalbinden; "Ebü'l-Hasan, hem zühdden anlatýyor, hem de üzerinde yeni elbiseler var. Bu nasýl
zahidliktir? Halbuki asýl zahid benim." diye geçirdi. Bu kimsenin kalbinden geçenleri anlýyan
Ebü'l-Hasan-ý
Þazili, onu yanýna çaðýrarak; "Senin üzerindeki elbiseyi görenler, seni zahid
sanarak hürmet ederler. Bundan dolayý sende bir gurur, kibir hasýl olabilir. Halbuki benim
üzerimdeki elbiseyi görenler, zahid olduðumu anlayamazlar. Böylece ben, hasýl olacak
gururdan kurtulurum." buyurdu. Bunu dinleyen fakir, yüksek bir yere çýkarak oradaki
insanlara; "Ey insanlar!Yemin ederim ki, biraz önce kalbimden Ebü'l-Hasan hazretleri
hakkýnda uygun olmayan þeyler düþünmüþtüm. Kalbimden geçeni anlýyarak, beni huzurlarýna
çaðýrýp nasihat ettiler. Þimdi hakikatý anlamýþ bulunuyorum. Þahid olunuz ki, huzurunuzda
tövbe istigfar ediyorum." dedi. Bunun üzerine Ebü'l-Hasan-ý
Þazili o kimseye yeni bir elbise
giydirip; "Allahü teala sana seçilmiþlerin muhabbetini versin. Sana hayýrlar, bereketler ihsan
eylesin." diye dua eyledi.
Ebü'l-Hasan-ý
Þazili hazretleri; "Mýsýr'da Muhammed Hanefi isminde birisi ortaya çýkacak.
Bizim yolumuzda yürüyüp, meþhur ve büyük þan sahibi olacaktýr. Kýrmýzýya yakýn beyaz
benizlidir. Sað yanaðýnda bir ben bulunur. Gözünün beyazý çok beyaz, siyahý da tam siyahtýr.
Yetim ve fakir olarak yetiþir. Benden itibaren beþinci sýradaki halifemiz olur." buyurdu.
Gerçekten öyle olmuþtur. Vasýflarý anlatýlan Muhammed Hanefi, bu büyüklerin yolunu
Nasýrüddin ibni Melik'ten, o, dedesi Þehabüddin bin Melik'ten, o, Yakut Arþi'den, o,
Mürsi'den, o da, Þazili'den almýþtýr.
Ebü'l-Hasan-ý
Þazili, Allahü tealanýn nihayetsiz ihsan ve ikramlarýna kavuþmuþ, görünen ve
görünmeyen bütün olgunluklara eriþmiþti. Bir gün seyahate çýkmýþtý. Kendi kendine; "Ya
Rabbi! Sana ne zaman þükür edici bir kul olabilirim?" dedi. Bu sýrada gaibden bir ses; "Bana
þükür edici bir kul olabilmen için, yeryüzünde senden fazla nimet verilmiþ bir kulun
olmadýðýný düþünmelisin." diyordu. Bu sözleri iþitince; "Ya Rabbi! Kendimden fazla nimet
verilmiþ bir kimsenin olmadýðýný nasýl düþünebilirim? Zira sen, peygamberlere, alimlere,
padiþahlara herkesten fazla nimet verdin." dedi. Bu defa; "Eðer peygamberlere
(aleyhimüsselam) nimet verilmeseydi, sen doðru yolu bulamazdýn. Alimler olmasaydý,
dinden çýkýp küfre girerdin. Padiþahlar olmasa, evinde emin bir halde rahat oturabilir miydin?
Bunlarýn hepsi, sana ihsan ettiðim nimetlerden deðil midir?" buyruldu.
Ebü'l-Hasan-ý
Þazili hazretleri Resulullah efendimizi sallallahü aleyhi ve sellem rüyada
gördü. Peygamber efendimiz ona; "Ya Ali! Elbiselerini kirden temizle ki, her nefesinde
Allahü tealanýn imdadýna mazhar olasýn." buyurdu. "Ya Resulallah! Benim elbisem
hangisidir?" dedim. Buyurdu ki: "Allahü teala sana beþ hil'at giydirmiþtir. Muhabbet, tevhid,
marifet, iman ve Ýslam hil'atlarýdýr. Allahü tealaya muhabbet edene, sevene her þey kolay
olur. Allahü tealayý tanýyanýn gözünde dünyadan bir þey kalmaz. Allahü tealayý vahdaniyetle
bilen, O'na hiçbir þeyi ortak koþmaz. Allahü tealaya inanan, her þeyde emin olur. Ýslamla
sýfatlanan, Hak tealaya asi olmaz. Eðer asi olursa, af diler. Af dilerse, kabul edilir.
Ebü'l-Hasan der ki: Bu izahtan, Allahü tealanýn Kur'an-ý kerimde mealen; "Ve elbiseni
temizle." ayetinin manasýný anladým."
Ebü'l-Hasan-ý
Þazili hazretleri talebelerine nasihat ederek buyurdu ki:
"Yolumuzun esasý beþþeydir: 1) Gizli ve aþikar, her halükarda Allahü tealadan korku halinde
olmak. 2) Her hal ve ibadetinde, Peygamberimizin sallallahü aleyhi ve sellem ve Eshabýnýn
(radýyallahü anhüm) gösterdiði doðru yola uyup, bid'at ve sapýklýklardan sakýnmak. 3)
Bollukta ve darlýkta, insanlardan bir þey beklememek. 4) Aza ve çoða razý olmak. 5) Sevinçli
veya kederli günlerde cenab-ý Hakk'a sýðýnmak."
"Bizim yolumuzda olan talebe, din kardeþlerini, arkadaþlarýný, son derece merhametle
gözetmeli, onlara son derece hürmet etmelidir. Ýçlerinden birini kendisine sohbet arkadaþý
seçmeli, bu arkadaþ, gaflete düþtüðünde, seni uyandýrmalý, ibadette tenbelliðe düþtüðünde
seni heveslendirmeli, aciz kaldýðýn yerde sana yardým etmeli ve sen doðru yoldan kaydýkça
seni doðru yola çekmeli. Sana nasihat vermeli, kötü harekette bulunduðunda veya bir günah
iþlediðinde sana uymayýp vaz geçirebilecek vasýflarda olmalýdýr. Arkadaþlarýna gelebilecek
eziyetlere mani olmalýsýn. Güzel ahlak edinip, þefkat ve merhamet üzere bulunmalýsýn. Hak
tealaya, itaat ve ibadeti, bu yola hizmeti gözetmeli ve buna sýmsýký sarýlmalýsýn. Lüzumsuz
þeylerle gözü meþgul edip, gönlü daðýtmamalýsýn. Zira bu, insandaki þehvet kuvvetini
arttýrýr."
Tasavvufta en yüksek derecelere kavuþmuþ olan ve Allahü tealadan baþkasýna gönül
vermeyen, dünyadan uzak olan Ebü'l-Hasan-ý
Þazili hazretleri bir sohbeti esnasýnda buyurdu
ki:
"Biz Hak'la olunca, mahluktan hiçbirini görmeyiz. Ýnsanlýk icabý baksak bile, onlar güneþ
ýþýðýnda dalgalanan havadaki ince toz gibi görünür. Dikkatle baksan bir þey bulamazsýn."
"En büyük günahlar ikidir: Biri dünya sevgisi, diðeri bilmediði bir iþin baþýna isteyerek
geçmek."
"Dünyadan ve dünya ehlinden tamamen uzaklaþmaz isen, velilik kokusunu alamazsýn."
"Þu üç þey bir insanda mevcut olursa, ona ilmin asla bir faydasý olmaz: 1) Dünyanýn faydasýz
þeylerine aþýrý baðlýlýk. 2) Ahireti hatýrdan çýkarmak. 3) Fakir olmaktan korkmak."
Günahlardan kaçýnmak ve iyiliklere devam etmek hususunda da þöyle buyurdu:
"Kalp huzursuzluðuna tutulmamak, eleme uðramamak ve günahlardan temizlenmek istersen,
iyi ve hayýrlý iþlerini çoðalt."
"Günahlarýn baðýþlanmasý ve baþa gelen belalardan korunmak için en güzel sýðýnak,
istiðfardýr, tövbe etmekdir."
"Ýlmi arttýkça günahý artan kimse, þüphesiz ki helak içindedir."
"Allahü tealaya hakkýyla iman ve Resulüne tabi olmaktan daha büyük keramet yoktur."
"Ýki iyilik vardýr ki, onlar bulunduðu sürece, çok da olsa kötülüklerin zararý
dokunmaz. Biri cenab-ý Hakk'ýn kaza ve kaderine razý olmak, diðeri Allahü tealanýn
kullarýna iyi muamele etmek."
Ebü'l-Hasan Þazili hazretleri bir sohbetinde de buyurdu ki: "Bizim bildiðimiz ve
bildirdiðimiz bilgilerden haberi olmayan zavallýlar, büyük günahlarda ýsrar ederek devam
ettikleri halde vefat ederler. Çünkü onlar iyiliðin kýymetini, kötülüðün zararýný, yani bunlarý
anlamaya yarayan bilgileri öðrenmemiþlerdir. Böylece nefislerinin heva ve arzularýna tabi
olarak günahlara dalmýþlar ve ömürleri bu gaflet ve cahillik içinde geçip gitmiþtir."
Ebü'l-Hasan-ý
Þazili hazretlerine; "Zahirde senin öyle büyük bir kemalin, olgunluðun, bir
ibadetin olmadýðý halde bu insanlar neden sana bu derece hürmet gösteriyorlar? Bunun sebebi
nedir?" diye sorduklarýnda, Ebü'l-Hasan-ý
Þazili hazretleri buyurdu ki: "Yalnýz bir sebeple
insanlar böyle yapýyor. O da Allahü teala onu her kimseye farz kýlmýþtýr. Ben o farzý yerine
getirince, insanlar bana böyle yapýyorlar. O da dünya ehlini terk etmektir. Dünya ve ehlini
terk etmek, iþimizi gücümüzü terk etmek deðil, yalnýz dünya ve dünya ehlinin sevgisini
gönülden çýkarmaktýr. Bu mahlukatý gönlümüze sokmamak, dünyayý ve mahluku cenab-ý
Hakk'ýn muhabbetine ortak ettirmemektir. Bu insanlar acaibdir. Onlar daima dýþ görünüþe
bakarlar ve adamýn zahid, dünyaya düþkün olmadýðýný görürler. Abid, çok ibadet eden ise,
büyük kimse derler. Þüphesiz bu büyüklük ise de asýl büyüklük ve olgunluk kalpteki
olgunluktur. Zahir, görünen iþlerimiz malumdur. Yemek, içmek, yatmak, uyumak, ibadet ve
taat etmek, haramlardan sakýnmak, vesairedir. Batýnýn iþi ise, Allahü teala ile huzur
bulmaktýr. Ahlak-ý ilahiyye ile ahlaklanmaktýr. Ýnsanýn esas olgunluðu batýnladýr. Zahirde her
iþi yerli yerine yapsak fakat kalbimizde kötü ahlaktan kurtulamasak, gafil ve cahil kalarak,
cenab-ý Hakk'ýn rýzasýna kavuþabilir miyiz?"
Kendisi anlatýr: "Bir gece rüyamda hazret-i Ebu Bekr-i Sýddik'ý gördüm. Bana; "Dünya
sevgisinin kalpten çýktýðýnýn alameti nedir, biliyor musun?" diye sordu. Bilmediðimi
söyleyince; "Dünya sevgisinin kalpten çýktýðýnýn alameti; bulunca vermek, olmayýnca kalben
rahat olmaktýr." buyurdu.
Ebü'l-Hasan-ý
Þazili hazretleri insanlara nasihattan, Ýslamiyetin emir ve yasaklarýný
anlattýktan sonra kalan zamanlarýnda Allahü tealaya ibadet eder, O'nun ismini zikrederdi.
Hizbü'l-Bahr adlý kitabýndaki tesbihleri ve dualarý okur ve okuturdu. Hizbü'l-Bahr
okumanýn dertlerden, sýkýntýlardan kurtulmaya vesile olduðunu bildirirdi. Okunmasýný
istediði Hizbü'l-Bahr hakkýnda þöyle buyurdu:
Darimi'nin Müsned'inde Abdullah ibni Mes'ud (radýyallahü anh) diyor ki: "Evde Bekara
suresi baþýndan Müflihun'a kadar beþ ayet okunduðu gece, þeytan o eve girmez." Peygamber
efendimiz sallallahü aleyhi ve sellem buyurdular ki: "Bir evde, þu otuz üç ayet okunduðu
gece, yýrtýcý hayvan ve eþkýya, düþman, sabaha kadar canýna ve malýna zarar yapamaz:
Bekara suresi baþýndan beþ ayet, Ayet-el-Kürsi baþýndan "Halidun"a kadar üç ayet,
Bekara sonunda "Lillahi"den sure sonuna kadar üç ayet, A'raf suresinde, "Ýnne
Rabbeküm"den "Muhsinin"e kadar, elli beþten itibaren üç ayet, Ýsra suresi sonundaki
"Kul"den iki ayet, Saffat suresi baþýndan "Lazib"e kadar on bir ayet, Rahman
suresinde "Ya ma'þerelcin"den "Feiza"ya kadar iki ayet, Haþr suresi sonunda "lev
enzelna"dan sure sonuna kadar, Cin suresi baþýndan "Þatata"ya kadar dört ayet."
Yedi defa Fatiha okuyup, dert ve aðrý olan uzva üflenirse, þifa hasýl olur. Ayet-i kerimenin ve
duanýn tesir etmesi için, okuyanýn ve okutanýn Ehl-i sünnet itikadýnda olmasý, haram
iþlemekten, kul hakkýndan sakýnmasý, haram ve habis þey yiyip içmemesi ve karþýlýk olarak
ücret istememesi þarttýr.
Bazýlarý bu kitaba itiraz edince; "Yemin ederim ki, bu kitabý harf be harf, harfi harfine
Resulullah'ýn mübarek aðzýndan, rüyada iþitip yazdým." buyurdu.
Ebu Abdullah anlattý: "Ben, Ebü'l-Hasan-ý
Þazili hazretlerini çok sever ve her sýkýntýmda
Allahü tealaya onu vesile ederek dua ederdim. Cenab-ý Hak da bütün istek ve ihtiyaçlarýmý
onun hürmetine ihsan eder, verirdi. Bir gün Resulullah efendimize rüyada, "Ya Resulallah!
Siz Ebü'l-Hasan-ý
Þazili'den razý mýsýnýz? Ben, her ne ihtiyacým olursa, onu vesile ederek
Allahü tealadan isterim ve bütün ihtiyaçlarým yerine gelir." dedim. Bunun üzerine Peygamber
efendimiz; "Ebü'l-Hasan benim evladýmdýr. Bütün evladlarda, babalarýnýn bir cüz'ü bulunur.
Her kim ki benim bir cüz'üme temessük ederse, onu vesile ederse, benim bütünüm ile
temessük etmiþ olur. Sen, Ebü'l-Hasan'ý vesile ederek Allahü tealadan bir þey istediðin
zaman, beni vesile ederek Allahü tealadan istemiþ olursun." buyurdu.
Ebü'l-Abbas-ý Mürsi þöyle anlattý: "Cenab-ý Hakk'a yemin ederim ki, her ne zaman bir
felaketle karþýlaþtým ve müþkilata uðradýmsa, hocam Ebü'l-Hasan-ý
Þazili'yi imdada çaðýrýp,
kurtuldum. Ey kardeþim! Sen de bir sýkýntýya düþersen, hemen onun ismini an ve kurtul.
Allahü teala bilir ki, sana doðru bir nasihat veriyorum."
Yine Ebü'l-Abbas anlattý: "Bir gün hocam Ebü'l-Hasan hazretlerinin arkasýnda namaz
kýlýyordum. Beni hayretlere düþüren hallere þahid olup, þunlarý gördüm. Hocamýn
vücudundan o kadar çok ve parlak nurlar çýkýyordu ki, onlara bakamýyordum."
Ebü'l-Hasan-ý
Þazili rahmetullahi aleyh þöyle anlattý: "Ayzad Sahrasýnda yolculuk
yapýyordum. Hýzýr aleyhisselam ile karþýlaþtým. Bana; "Ey Ebü'l-Hasan! Allahü teala sana
lütufta bulundu. Hazerde de seferde de senin arkadaþýn var. Ben hep senin yanýnda
bulunuyorum." dedi.
Ebü'l-Hasan-ý
Þazili hazretleri hemen her sene hac ibadetini yerine getirmek üzere Mekke-i
mükerremeye giderdi. Ayný zamanda Medine-i münevvereye giderek sevgili
Peygamberimizin kabr-i þerifini ziyaret ederdi. Bir sene talebelerinden Ebü'l-Abbas-ý Mürsi
onunla bulunduðu sýradaki bir hadiseyi þöyle anlattý:
Hocam Ebü'l-Hasan ile birlikte Medinetürresul'de yani Medine-i münevverede bulunuyorduk.
Bu arada ben, hazret-i Hamza'nýn kabrini ziyaret etmek istedim. Medine-i münevvereden
ayrýldým. Benimle beraber birisi de oraya gidiyordu. Hazret-i Hamza'nýn kabrine vardýk.
Kapýsý kapalý idi. Fakat Resulullah'ýn sallallahü aleyhi ve sellem bereketiyle kapý açýldý. Ýçeri
girdik. Ýçeride velilerden biri vardý. Benimle beraber gelen þahsa; "Allahü tealadan ne dileðin
varsa iste, çünkü þu anda yapýlan dua kabul olur." dedim. Ancak bu þahýs, duasýnda Allahü
tealadan bin dirhem istedi. Medine'ye dönünce biri kendisine bin dirhem verdi. Bu þahýs,
Ebü'l-Hasan'ýn huzuruna girince, hazret-i Hamza'nýn kabrine beraber gittiðimiz zata; "Ey
Batla! Ýcabet vaktine, duanýn kabul olacaðý vakte rastladýn. Fakat Allahü tealadan bin dinar
istedin. Keþke, Allahü tealadan Ebü'l-Abbas'ýn istediði gibi isteseydin. O, Allahü tealadan;
kendisini dünya düþüncesinden muhafaza buyurmasýný ve ahiret azabýndan kurtarmasýný
diledi ve bu dilekleri kabul oldu." buyurdu.
Arabistan'daki Hicaz halký gibi buðday tenli ve uzunca boylu olan Ebü'l-Hasan-ý
Þazili
hazretleri, konuþmalarýndaki fesahat ve tatlýlýk, açýklýk ve vecizlik bakýmýndan, Hicazlý
olmamasýna raðmen, Hicazlý zannedilirdi. Tasavvufta Sýrri-yi Sekati ve Seyyid Ahmed
Rýfai'nin rahmetullahi aleyhima yollarýndan feyz aldý. Ýbn-i Meþiþ-i Haseni'nin hizmetinde ve
sohbetinde bulunarak velilik derecesine kavuþtu. Tefsir, hadis, fýkýh, usul, nahiv, sarf, lügat
ve zamanýn fen ilimlerinde de son derece yüksek olan Ebü'l-Hasan-ý
Þazili hazretleri; "Her
istediðim zaman, Resulullah efendimizi, baþ gözümle görmezsem, kendimi O'nun ümmeti
saymam." buyurarak tasavvuftaki derecesini ifade etmiþtir.
Ýnsanlara bir sohbeti sýrasýnda; "Allahü teala sözlerinde doðru ve iþlerinde ihlaslý olana
dünyada yaðmur gibi rýzýk verir. Onu kötülüklerden korur. Ahirette de günahlarýný affedip,
baðýþlar. Ona yakýn olur. Cennet'ine koyar ve yüksek derecelere kavuþturur. Kendi
kusurlarýný
ýslah etmek istersen, insanlarýn kusurlarýný araþtýrma. Çünkü hüsn-i zan, iman
þubelerinden olduðu gibi, insanlarýn ayýplarýný araþtýrmak da münafýklýktandýr. Kýyamet
günü, yol gösteren nur içinde haþrolunup karanlýktan korunmak istersen Allahü tealanýn hiç
bir mahlukuna zulmetme." buyuran Ebü'l-Hasan-ý
Þazili hazretleri, sonuncu defa hac
yolculuðuna çýktý. Bu seyahatinde talebesine, yanýna bir kazma, bir ibrik ve bir de kafur
almasýný emretti. Bunlarý niçin aldýrdýðýný soran talebesine; "Hamisre'ye varýnca anlarsýn."
buyurdu. Talebesi bilahare þöyle anlattý: Sahra-i Ayzab'da Hamisre'ye vardýk. Ebü'l-Hasan-ý
Þazili hazretleri, gusl ederek iki rekat namaz kýldý. Sonra seccadede ruhunu teslim etti.
Yanlarýna aldýklarý kazma ile mezar kazýlýp, ibrikle su taþýnýp yýkandýktan sonra, kafur konup
hemen oraya defnedildi. Vefat ettiði yerin suyu tuzlu olduðundan bir þey yetiþmezdi. Oraya
definlerinden sonra, vücudlarýnýn bereketiyle o yerin suyu tatlýlaþtý ve münbit bir yer haline
geldi."
Ebü'l-Hasan-ý
Þazili hazretlerinin þu eserleri vardýr: 1) Hizbü'l-Bahr: Kýymetli bir dua
kitabýdýr. 2) El-Ýhtisas min-el-Kavaidi'l-Kur'aniyye vel-Havas, 3) Risaletü'l-Eminli-Yencezibe li-Rabbi'l-Alemin, 4) El-Cevahirü'l-Masune, 5) El-Leali'l-Meknune, 6)
Kýyafetü't-Talibi'r-Rabbani li-Risaleti Ebu Zeyd el-Kayravani, 7)
El-Mukaddimetü'l-Ýzziyye lil-Cemaati'l-Ezheriyye.
ALTIN OLAN TAÞ
Ebü'l-Hasan-ý
Þazili, memleketinden Ýskenderiyye'ye geldiðinde, o zamanýn sultaný bir mektup
yazarak kendisini davet etti. Sultan, daveti kabul edip gelen Ebü'l-Hasan'a çok izzet ve ikram gösterip
hürmette bulundu. Sonra Ýskenderiyye'ye, büyük bir saygýyla uðurladý. Sultana, bir müddet sonra
Ebü'l-Hasan-ý
Þazili aleyhinde iftiralarda bulundular. Öyle ki, sultan çok kýzýp, muhafýzýna, onu
öldürme emrini verdi. Muhafýz, Ýskenderiyye'ye, Ebü'l-Hasan'ýn huzuruna gelip sultanýn emrini
bildirdi ve; "Efendim, benim size çok hürmetim ve muhabbetim vardýr. Sizin, Allahü tealanýn sevgili
kullarýndan olduðunuza inanýyorum. Öyle bir þey yapýnýz ve söyleyiniz ki, sultan bu kararýndan
vazgeçsin." dedi. Bu sözleri dinleyen Ebü'l-Hasan-ý
Þazili dýþarý çýktý. Muhafýz da onu takib etti.
Muhafýza dedi ki: "Þu taþa bakýnýz!" Muhafýz, biraz önce taþ olarak gördüðü cismin, þimdi altýn
olduðunu görerek hayret etti. Taþ, Allahü tealanýn izniyle Ebü'l-Hasan-ý
Þazili'nin teveccühleri ile
altýn olmuþtu. Muhafýza; "Bu taþý alýp sultana götürünüz. Beyt-ül-mal hazinesine koysun." buyurdu.
Muhafýz altýný alýp sultanýn huzuruna gitti ve iftira durumunu anlattý. Bu hadise üzerine sultan,
Ýskenderiyye'ye kadar gelip Ebü'l-Hasan-ý
Þazili'yi ziyaret etti. Özür diledi ve ona pekçok mal ve
erzak gönderip, ihsanlarda bulundu. Fakat Þazili hazretleri hiçbir þey kabul etmeyip; "Biz
Rabbimizden baþka hiç kimseden bir þey istemeyiz." buyurdu.
SOHBETÝN EHEMMÝYETÝ
Ebü'l-Hasan-ý
Þazili hazretlerinin talebelerinden birisi, tasavvuf yolundaki dereceleri geçerken kendini
hocasý gibi görmeye baþladý. Neye baksa Þeyhini görüyordu. Bu sebeple Ebü'l-Hasan-ý
Þazili'nin
sohbetlerine gelmemeye baþladý. Bir gün Ýmam-ý
Þazili hazretleri yolda giderken talebesiyle karþýlaþtý
ve; "Caným sen nerede kaldýn. Sohbetlere gelmiyorsun!" buyurdu. Talebe; "Efendim, sizinle sözden
müstaðni oldum. Yani her an sizi karþýmda görüyorum ve kendimi sizin suretinizde görüyorum.
Sohbetinize gelmeye ihtiyaç duymuyorum." dedi. Bu cevap üzerine Ebü'l-Hasan-ý
Þazili hazretleri
buyurdu ki: "Çok garib. Eðer iþ senin söylediðin gibi olsaydý, hazret-i Ebu Bekr'in Resulullah
efendimizin sohbetlerine gitmemeleri gerekirdi. Eðer sohbetten müstaðni olsaydý, hazret-i Ebu Bekr
efendimiz müstaðni olurdu."
1) Menakýb-ý Ebi'l-Hasan Þazili lil-Fasi
2) Ebü'l-Hasan-ý
Þazili (Ali Salim Ammar)
3) Tabakatü'l-Evliya; s.458
4) Tabakatü'l-Kübra; c.2, s.4
5) Mu'cemü'l-Müellifin; c.7, s.137
6) Þezeratü'z-Zeheb; c.5, s.278
7) Kevakibü'd-Düriyye
8) Esmaü'l-Müellifin; c.1, s.79
9) Hüsnü'l-Muhadara; c.1, s.298
10) Camiu Keramati'l-Evliya; c.2, s.175
11) Ebü'l-Hasan-ý
Þazili (Dr. Abdulhalim Mahmud)
12) Ravdü'r-Reyyahin; s.177, 272
13) Tam Ýlmihal Seadet-i Ebediyye (49. Baský) ; s.1071
14) Ýslam Alimleri Ansiklopedisi; c.8, s.226, c.11, s.76, 177, c.17, s.175, 195
15) Letaifü'l-Minen
16) The Müslim World Sene 12, Sayý; 179, 257
17) Sohbetname; c.1, s.88, 123
<br></div></body></html>
| {
"content_hash": "ef3b23b13c4221f745c9641bbeaa0db0",
"timestamp": "",
"source": "github",
"line_count": 417,
"max_line_length": 564,
"avg_line_length": 59.90407673860911,
"alnum_prop": 0.7984787830264212,
"repo_name": "relmas/evliya",
"id": "b352f41de3d08bfeb4c1bb17097bc952f4291ddb",
"size": "24980",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "www/sayfalar/633.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "227452"
},
{
"name": "JavaScript",
"bytes": "296010"
}
],
"symlink_target": ""
} |
namespace WebApplication1
{
    using System;
    using System.Collections.Generic;

    /// <summary>
    /// Plain data entity representing a sales territory record.
    /// NOTE(review): the partial modifier, SuppressMessage attributes, and the
    /// collection navigation property suggest this file was generated by
    /// Entity Framework database-first tooling — confirm before hand-editing,
    /// since regeneration may overwrite changes.
    /// </summary>
    public partial class SalesTerritory
    {
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
        public SalesTerritory()
        {
            // Initialize the navigation collection so it is never null.
            this.SalesPerson = new HashSet<SalesPerson>();
        }

        // Territory identifier (presumably the primary key — TODO confirm
        // against the mapping configuration, which is not visible here).
        public int TerritoryID { get; set; }
        public string Name { get; set; }
        public string CountryRegionCode { get; set; }
        public string Group { get; set; }
        // Decimal aggregates; currency/units are not visible in this file.
        public decimal SalesYTD { get; set; }
        public decimal SalesLastYear { get; set; }
        public decimal CostYTD { get; set; }
        public decimal CostLastYear { get; set; }
        public System.Guid rowguid { get; set; }
        public System.DateTime ModifiedDate { get; set; }

        // Collection navigation to the SalesPerson entities linked to this territory.
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
        public virtual ICollection<SalesPerson> SalesPerson { get; set; }
    }
}
| {
"content_hash": "ccc2686e9ae6c301e4cda38e0a677850",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 128,
"avg_line_length": 38.285714285714285,
"alnum_prop": 0.6511194029850746,
"repo_name": "Dhiraj3005/Mastering-C-Sharp-and-.NET-Framework",
"id": "3d74a0d314f4d9ab3d68abf799557c63150d8022",
"size": "1496",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Chapter09/WebApplication1/WebApplication1/SalesTerritory.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "522"
},
{
"name": "C#",
"bytes": "448327"
},
{
"name": "CSS",
"bytes": "12183"
},
{
"name": "F#",
"bytes": "6227"
},
{
"name": "HTML",
"bytes": "41706"
},
{
"name": "JavaScript",
"bytes": "5861533"
},
{
"name": "PowerShell",
"bytes": "447357"
},
{
"name": "TypeScript",
"bytes": "3374"
}
],
"symlink_target": ""
} |
<!doctype html>
<title>Chunked response / Etag/ cache-control: must-revalidate </title>
<pre>FAIL (script didn't run)</pre>
<p><video controls></video>
<script>
var p = document.querySelector('pre');
var log = [];
var video = document.querySelector('video');
// load video
video.src = '../range-request-log/range-request.php?etag=yes&cachecontrol=must-revalidate&status=200%20OK&acceptranges=no&contentrange=no&chunked=yes&rate=10000';
video.addEventListener('loadedmetadata', function() {
  end();
}, false);
setTimeout(function() {
  log.push('timed out');
  end();
}, 10000);
// Closure flag replaces the deprecated arguments.callee idiom (forbidden in
// strict mode) while preserving the run-once semantics of end().
var ended = false;
function end() {
  if (ended)
    return;
  ended = true;
  // Test passes when nothing was logged (i.e. loadedmetadata fired in time).
  var passed = log.length == 0;
  p.textContent = passed ? 'PASS' : 'FAIL\n' + log.join('\n');
  // Report the result to the test harness if one opened this page.
  try{top.opener.rr(passed)}catch(e){}
}
</script>
| {
"content_hash": "256990e92a8bd34097017c062d8accc9",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 162,
"avg_line_length": 26.451612903225808,
"alnum_prop": 0.6853658536585366,
"repo_name": "operasoftware/presto-testo",
"id": "b8451034048ef3ac33c307bf822ff3c080e4c4eb",
"size": "820",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "core/standards/web-apps/media/network/cache/022.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "2312"
},
{
"name": "ActionScript",
"bytes": "23470"
},
{
"name": "AutoHotkey",
"bytes": "8832"
},
{
"name": "Batchfile",
"bytes": "5001"
},
{
"name": "C",
"bytes": "116512"
},
{
"name": "C++",
"bytes": "279128"
},
{
"name": "CSS",
"bytes": "208905"
},
{
"name": "Groff",
"bytes": "674"
},
{
"name": "HTML",
"bytes": "106576719"
},
{
"name": "Haxe",
"bytes": "3874"
},
{
"name": "Java",
"bytes": "185827"
},
{
"name": "JavaScript",
"bytes": "22531460"
},
{
"name": "Makefile",
"bytes": "13409"
},
{
"name": "PHP",
"bytes": "524372"
},
{
"name": "POV-Ray SDL",
"bytes": "6542"
},
{
"name": "Perl",
"bytes": "321672"
},
{
"name": "Python",
"bytes": "954636"
},
{
"name": "Ruby",
"bytes": "1006850"
},
{
"name": "Shell",
"bytes": "12140"
},
{
"name": "Smarty",
"bytes": "1860"
},
{
"name": "XSLT",
"bytes": "2567445"
}
],
"symlink_target": ""
} |
[](https://cocoapods.org/pods/FaveButton)
[](http://cocoapods.org/pods/FaveButton)
[](https://github.com/xhamr/fave-button)
[](https://codebeat.co/projects/github-com-xhamr-fave-button)
[](https://travis-ci.org/xhamr/fave-button)
Favorite Animated Button written in Swift

## Requirements
- iOS 8.0+
- Xcode 7.3
## Installation
For manual installation, drag the Source folder into your project.
Or use [CocoaPods](https://cocoapods.org) by adding this line to your `Podfile`:
```ruby
pod 'FaveButton', '~> 1.2.1' swift 2.2
pod 'FaveButton', '~> 2.0.0' swift 3
```
For [Carthage](https://github.com/Carthage/Carthage) users, add this line to your `Cartfile`:
```ruby
github "xhamr/fave-button"
```
## Usage
#### With storyboard or xib files
1) Create a Button that inherits from `FaveButton`
2) Add Image for a `Normal` state
3) Set the `IBOutlet` delegate property to a subclass of `FaveButtonDelegate`
4) ___Optional___ manipulate properties to change button settings
```swift
@IBInspectable public var normalColor: UIColor
@IBInspectable public var selectedColor: UIColor
@IBInspectable public var dotFirstColor: UIColor
@IBInspectable public var dotSecondColor: UIColor
@IBInspectable public var circleFromColor: UIColor
@IBInspectable public var circleToColor: UIColor
```
5) ___Optional___ respond to delegate methods
```swift
func faveButton(faveButton: FaveButton, didSelected selected: Bool)
func faveButtonDotColors(faveButton: FaveButton) -> [DotColors]?
```
#### In Code
```swift
let faveButton = FaveButton(
frame: CGRect(x:200, y:200, width: 44, height: 44),
faveIconNormal: UIImage(named: "heart")
)
faveButton.delegate = self
view.addSubview(faveButton)
```
## Manipulating dot colors
If you want different colors for the dots, like `Twitter’s Heart Animation`, use the delegate method for the button you want.
```swift
func faveButtonDotColors(_ faveButton: FaveButton) -> [DotColors]?{
if faveButton == myFaveButton{
// return dot colors
}
return nil
}
```
In [FaveButtonDemo](https://github.com/xhamr/fave-button/tree/master/FaveButtonDemo) you will find a set of colors that make the dots appear like this:

## Credits
FaveButton was inspired by Twitter’s Like Heart Animation within their [App](https://itunes.apple.com/us/app/twitter/id333903271)
## Licence
FaveButton is released under the MIT license.
| {
"content_hash": "c858f7864dd21a3439e45c1306fd1d28",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 145,
"avg_line_length": 25.596491228070175,
"alnum_prop": 0.7402330363262508,
"repo_name": "saqibdb/SQBEmojiSDK",
"id": "fe35f44d06a61ca81545ea3ab09fb1df1ea8f06c",
"size": "2936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Pods/FaveButton/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "24852"
},
{
"name": "Ruby",
"bytes": "531"
},
{
"name": "Swift",
"bytes": "218"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<title>DM_SETITEMPOSITION</title>
<meta http-equiv="Content-Type" Content="text/html; charset=utf-8">
<link rel="stylesheet" type="text/css" href="../../../styles/styles.css">
<script language="javascript" src='../../links.js' type="text/javascript"></script>
</head>
<body>
<h1>DM_SETITEMPOSITION</h1>
<div class=navbar>
<a href="index_dm.html">Messages</a> |
<a href="../index.html">Dialog API</a><br>
</div>
<div class=shortdescr>
The <dfn>DM_SETITEMPOSITION</dfn> message is sent to the dialog manager
to change the position of a dialog item.
</div>
<h3>Param1</h3>
<div class=descr>
The ID of the dialog item for which the position is changed.
</div>
<h3>Param2</h3>
<div class=descr>
Pointer to a <a href="../../winapi/small_rect.html">SMALL_RECT</a> structure
containing the new item coordinates.
</div>
<h3>Return</h3>
<div class=descr>
TRUE - item position has been changed.<br>
FALSE - an item with such ID does not exist.<br>
</div>
<h3>Controls</h3>
<div class=descr>
<table class="cont">
<tr class="cont"><th class="cont" width="40%">Control</th><th class="cont" width="60%">Description</th></tr>
<tr class="cont"><td class="cont" width="40%">All</td>
<td class="cont" width="60%">All dialog items</td></tr>
</table>
</div>
<div class=see>See also:</div><div class=seecont>
<a href="../dialogex.html">DialogEx</a><br>
<a href="dm_getitemposition.html">DM_GETITEMPOSITION</a><br>
<a href="dm_resizedialog.html">DM_RESIZEDIALOG</a><br>
</div>
</body>
</html>
| {
"content_hash": "0a2eb1d0411cba24c164649bc15a2fe7",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 108,
"avg_line_length": 27.228070175438596,
"alnum_prop": 0.6842783505154639,
"repo_name": "johnd0e/FarManager",
"id": "4c87da177b1bd6b41f734961cdfe90875f0df5fc",
"size": "1552",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "enc/enc_eng/meta/dialogapi/dmsg/dm_setitemposition.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Ada",
"bytes": "89080"
},
{
"name": "Assembly",
"bytes": "179888"
},
{
"name": "Awk",
"bytes": "4387"
},
{
"name": "Batchfile",
"bytes": "18149"
},
{
"name": "C",
"bytes": "2358555"
},
{
"name": "C#",
"bytes": "54113"
},
{
"name": "C++",
"bytes": "10547763"
},
{
"name": "CLIPS",
"bytes": "6933"
},
{
"name": "CSS",
"bytes": "57424"
},
{
"name": "DIGITAL Command Language",
"bytes": "13234"
},
{
"name": "HTML",
"bytes": "7508803"
},
{
"name": "JavaScript",
"bytes": "4072"
},
{
"name": "Lua",
"bytes": "585588"
},
{
"name": "M4",
"bytes": "1360872"
},
{
"name": "Makefile",
"bytes": "18367"
},
{
"name": "Module Management System",
"bytes": "1545"
},
{
"name": "MoonScript",
"bytes": "1406"
},
{
"name": "Objective-C",
"bytes": "241098"
},
{
"name": "PHP",
"bytes": "246"
},
{
"name": "Pascal",
"bytes": "225209"
},
{
"name": "PowerShell",
"bytes": "5830"
},
{
"name": "Python",
"bytes": "28891"
},
{
"name": "Roff",
"bytes": "4486"
},
{
"name": "SAS",
"bytes": "1711"
},
{
"name": "Shell",
"bytes": "17682"
}
],
"symlink_target": ""
} |
package org.jetbrains.jps.model.java.impl;
import com.intellij.openapi.util.Comparing;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jps.model.JpsUrlList;
import org.jetbrains.jps.model.ex.JpsCompositeElementBase;
import org.jetbrains.jps.model.impl.JpsUrlListRole;
import org.jetbrains.jps.model.java.JpsJavaModuleExtension;
import org.jetbrains.jps.model.java.LanguageLevel;
/**
 * Implementation of {@link JpsJavaModuleExtension}: holds the Java-specific
 * settings of a module — compiler output URLs, the inherit/exclude output
 * flags, the language level, and the javadoc / external-annotation roots.
 *
 * @author nik
 */
public class JpsJavaModuleExtensionImpl extends JpsCompositeElementBase<JpsJavaModuleExtensionImpl> implements JpsJavaModuleExtension {
  private static final JpsUrlListRole JAVADOC_ROOTS_ROLE = new JpsUrlListRole("javadoc roots");
  private static final JpsUrlListRole ANNOTATIONS_ROOTS_ROLE = new JpsUrlListRole("annotation roots");

  private String myOutputUrl;
  private String myTestOutputUrl;
  private boolean myInheritOutput;
  private boolean myExcludeOutput;
  private LanguageLevel myLanguageLevel;

  public JpsJavaModuleExtensionImpl() {
    myContainer.setChild(JAVADOC_ROOTS_ROLE);
    myContainer.setChild(ANNOTATIONS_ROOTS_ROLE);
  }

  private JpsJavaModuleExtensionImpl(JpsJavaModuleExtensionImpl original) {
    super(original);
    myOutputUrl = original.myOutputUrl;
    myTestOutputUrl = original.myTestOutputUrl;
    myLanguageLevel = original.myLanguageLevel;
    // Bug fix: the two output flags were previously not copied here, so
    // extensions duplicated via createCopy() silently reset
    // inherit-output/exclude-output to false (applyChanges() below shows all
    // five fields are part of this extension's state).
    myInheritOutput = original.myInheritOutput;
    myExcludeOutput = original.myExcludeOutput;
  }

  @NotNull
  @Override
  public JpsJavaModuleExtensionImpl createCopy() {
    return new JpsJavaModuleExtensionImpl(this);
  }

  @Override
  public JpsUrlList getAnnotationRoots() {
    return myContainer.getChild(ANNOTATIONS_ROOTS_ROLE);
  }

  @Override
  public JpsUrlList getJavadocRoots() {
    return myContainer.getChild(JAVADOC_ROOTS_ROLE);
  }

  @Override
  public String getOutputUrl() {
    return myOutputUrl;
  }

  @Override
  public void setOutputUrl(String outputUrl) {
    // Fire a change event only when the value actually differs (null-safe compare).
    if (!Comparing.equal(myOutputUrl, outputUrl)) {
      myOutputUrl = outputUrl;
      fireElementChanged();
    }
  }

  @Override
  public String getTestOutputUrl() {
    return myTestOutputUrl;
  }

  @Override
  public void setTestOutputUrl(String testOutputUrl) {
    if (!Comparing.equal(myTestOutputUrl, testOutputUrl)) {
      myTestOutputUrl = testOutputUrl;
      fireElementChanged();
    }
  }

  @Override
  public LanguageLevel getLanguageLevel() {
    return myLanguageLevel;
  }

  @Override
  public void setLanguageLevel(LanguageLevel languageLevel) {
    if (!Comparing.equal(myLanguageLevel, languageLevel)) {
      myLanguageLevel = languageLevel;
      fireElementChanged();
    }
  }

  /**
   * Copies every setting from {@code modified} into this extension, firing a
   * change event for each field that actually differs.
   */
  public void applyChanges(@NotNull JpsJavaModuleExtensionImpl modified) {
    setLanguageLevel(modified.myLanguageLevel);
    setInheritOutput(modified.myInheritOutput);
    setExcludeOutput(modified.myExcludeOutput);
    setOutputUrl(modified.myOutputUrl);
    setTestOutputUrl(modified.myTestOutputUrl);
  }

  @Override
  public boolean isInheritOutput() {
    return myInheritOutput;
  }

  @Override
  public void setInheritOutput(boolean inheritOutput) {
    if (myInheritOutput != inheritOutput) {
      myInheritOutput = inheritOutput;
      fireElementChanged();
    }
  }

  @Override
  public boolean isExcludeOutput() {
    return myExcludeOutput;
  }

  @Override
  public void setExcludeOutput(boolean excludeOutput) {
    if (myExcludeOutput != excludeOutput) {
      myExcludeOutput = excludeOutput;
      fireElementChanged();
    }
  }
}
| {
"content_hash": "eb3d0d3530f0be8eb579f0f40f063cf3",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 135,
"avg_line_length": 27.137096774193548,
"alnum_prop": 0.7533432392273403,
"repo_name": "lshain-android-source/tools-idea",
"id": "9852e91e60b229c787f57e4badaa1aaf0555d703",
"size": "3965",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "jps/model-impl/src/org/jetbrains/jps/model/java/impl/JpsJavaModuleExtensionImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "182"
},
{
"name": "C",
"bytes": "172277"
},
{
"name": "C#",
"bytes": "390"
},
{
"name": "C++",
"bytes": "77776"
},
{
"name": "CSS",
"bytes": "11575"
},
{
"name": "Erlang",
"bytes": "10"
},
{
"name": "FLUX",
"bytes": "57"
},
{
"name": "Groovy",
"bytes": "1838147"
},
{
"name": "J",
"bytes": "5050"
},
{
"name": "Java",
"bytes": "117312672"
},
{
"name": "JavaScript",
"bytes": "112"
},
{
"name": "Objective-C",
"bytes": "19631"
},
{
"name": "Perl",
"bytes": "6549"
},
{
"name": "Python",
"bytes": "2787996"
},
{
"name": "Shell",
"bytes": "68540"
},
{
"name": "XSLT",
"bytes": "113531"
}
],
"symlink_target": ""
} |
<?php
use Base\User as BaseUser;
/**
 * Active-record class representing a single row of the 'user' table.
 *
 * Generated skeleton subclass of the generated base class (see the
 * `use Base\User` import above). Add application-specific methods here:
 * the generator only creates this file when it does not already exist in
 * the output directory, so code added here survives rebuilds.
 */
class User extends BaseUser
{

}
| {
"content_hash": "29bf818be0729a5bde437e3083288f52",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 66,
"avg_line_length": 19.27777777777778,
"alnum_prop": 0.7146974063400576,
"repo_name": "joseluisq/codeigniter3-hmvc-boilerplate",
"id": "5590f0b5671904686c01fad75757573ce3a7e524",
"size": "347",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "orm/classes/User.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "12259"
},
{
"name": "HTML",
"bytes": "2227"
},
{
"name": "PHP",
"bytes": "221355"
}
],
"symlink_target": ""
} |
<?xml version="1.0"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<packaging>jar</packaging>
<parent>
<artifactId>sakai-gradebook-app</artifactId>
<groupId>org.sakaiproject</groupId>
<version>10.4</version>
<relativePath>../pom.xml</relativePath>
</parent>
<groupId>org.sakaiproject</groupId>
<artifactId>sakai-gradebook-standalone-app</artifactId>
<name>sakai-gradebook-standalone-app</name>
<dependencies>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.4.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
</dependency>
<dependency>
<groupId>org.javassist</groupId>
<artifactId>javassist</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>
<dependency>
<groupId>org.sakaiproject.kernel</groupId>
<artifactId>sakai-kernel-api</artifactId>
</dependency>
<dependency>
<groupId>org.sakaiproject.kernel</groupId>
<artifactId>sakai-component-manager</artifactId>
</dependency>
<dependency>
<groupId>org.sakaiproject</groupId>
<artifactId>sakai-gradebook-app-ui</artifactId>
</dependency>
<dependency>
<groupId>org.sakaiproject</groupId>
<artifactId>sakai-gradebook-app-business</artifactId>
</dependency>
<!-- Needed for unit testing with Hibernate 3. -->
<dependency>
<groupId>antlr</groupId>
<artifactId>antlr</artifactId>
<version>2.7.6rc1</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.sakaiproject.edu-services.gradebook</groupId>
<artifactId>gradebook-service-api</artifactId>
</dependency>
<dependency>
<groupId>org.sakaiproject.edu-services.gradebook</groupId>
<artifactId>gradebook-service-hibernate</artifactId>
</dependency>
<dependency>
<groupId>org.sakaiproject.edu-services.gradebook</groupId>
<artifactId>gradebook-service-impl</artifactId>
</dependency>
<!-- Section Awareness -->
<dependency>
<groupId>org.sakaiproject.edu-services.sections</groupId>
<artifactId>sections-api</artifactId>
</dependency>
<!-- Standalone component -->
<dependency>
<groupId>org.sakaiproject.edu-services.sections</groupId>
<artifactId>sections-impl-standalone</artifactId>
</dependency>
<!-- Used only for test data. -->
<dependency>
<groupId>org.sakaiproject.edu-services.sections</groupId>
<artifactId>sections-integrationsupport</artifactId>
</dependency>
<!-- this is required for any Sakai JSF tool -->
<dependency>
<groupId>org.sakaiproject.jsf</groupId>
<artifactId>jsf-tool</artifactId>
</dependency>
<!-- sakai2 plugin -->
<!--
The Gradebook itself has no need of a Uuid generator.
The following is included as a dependency of the standalone Sections
integration support JAR.
-->
<!-- Uuid generator -->
<!--org.sakaiprojectsakai-util-api${sakai.version}-->
<!--org.sakaiprojectsakai-user-api${sakai.version}provided-->
<!--
sakai-util-impl is a part of the kernel implementation, p:dependency removed
org.sakaiprojectsakai-util-impl${sakai.version}-->
<!-- This should be part of the test framework -->
<dependency>
<groupId>org.sakaiproject.kernel</groupId>
<artifactId>sakai-kernel-private</artifactId>
<version>${sakai.kernel.version}</version>
<scope>test</scope>
</dependency>
<!-- End Sakai2 Dependencies -->
<!-- Database dependencies used for integration testing and schema generation -->
<!-- Please contact the gradebook development team before removing these -->
<!-- Uncomment as necessary to run tests, or to generate new db schemas -->
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
</dependency>
<!-- Since none of these JDBC libraries will be used except in standalone test -->
<!-- builds, we specify the '_g' Oracle JAR to obtain more debugging information. -->
<!-- The normal 'ojdbc14.jar' can be used instead, however. -->
<!--
<dependency>
<groupId>oracle</groupId>
<artifactId>oracle</artifactId>
<jar>ojdbc14_g.jar</jar>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>3.1.12</version>
</dependency>
-->
<!-- End database dependencies -->
<dependency>
<groupId>${sakai.spring.groupId}</groupId>
<artifactId>${sakai.spring.test.artifactId}</artifactId>
<version>${sakai.spring.test.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>aopalliance</groupId>
<artifactId>aopalliance</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<groupId>jta</groupId>
<artifactId>jta</artifactId>
<version>h2.1.8</version>
</dependency>
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib-nodep</artifactId>
<version>2.1_3</version>
</dependency>
<dependency>
<groupId>odmg</groupId>
<artifactId>odmg</artifactId>
<version>3.0</version>
</dependency>
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>commons-pool</groupId>
<artifactId>commons-pool</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>${sakai.commons.lang.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>3.1</version>
</dependency>
<dependency>
<groupId>commons-digester</groupId>
<artifactId>commons-digester</artifactId>
<version>1.6</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.7.0</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${sakai.commons-logging.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>com.thoughtworks.xstream</groupId>
<artifactId>xstream</artifactId>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<dependency>
<groupId>dom4j</groupId>
<artifactId>dom4j</artifactId>
<version>1.4</version>
</dependency>
<!-- MyFaces JSF -->
<dependency>
<groupId>org.apache.myfaces.core</groupId>
<artifactId>myfaces-api</artifactId>
<version>1.1.5</version>
</dependency>
<dependency>
<groupId>org.apache.myfaces.core</groupId>
<artifactId>myfaces-impl</artifactId>
<version>1.1.5</version>
</dependency>
<dependency>
<groupId>org.apache.myfaces.tomahawk</groupId>
<artifactId>tomahawk</artifactId>
<version>1.1.11</version>
</dependency>
<!-- Needed by MyFaces ExtensionsFilter -->
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>1.1.1</version>
</dependency>
<!-- base64 used to implement STATE_SAVING_METHOD client -->
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>jstl</artifactId>
</dependency>
<dependency>
<groupId>taglibs</groupId>
<artifactId>standard</artifactId>
<version>1.1.2</version>
</dependency>
<!--javax.servletservlet-api${sakai.servletapi.version}-->
<dependency>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
</dependency>
<!-- Sakai tag library -->
<dependency>
<groupId>org.sakaiproject.jsf</groupId>
<artifactId>myfaces-widgets-depend</artifactId>
<type>pom</type>
</dependency>
<dependency>
<groupId>org.sakaiproject.jsf</groupId>
<artifactId>jsf-spreadsheet</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<phase>process-resources</phase>
<configuration>
<tasks>
<echo message="Copying shared resources into webapp" />
<copy todir="${project.build.directory}/${project.build.finalName}" overwrite="true">
<fileset dir="${basedir}/../ui/src/webapp" />
</copy>
</tasks>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<includes>
<include>**/*TestSuite.java</include>
</includes>
<!-- FIXME: Gradebook tests are disabled theres since they dont all pass
Please Fix -->
<!-- <skip>true</skip>-->
</configuration>
</plugin>
</plugins>
<resources>
<resource>
<directory>${basedir}/../ui/src/bundle</directory>
</resource>
<resource>
<directory>src/resources</directory>
</resource>
</resources>
<testResources>
<testResource>
<directory>${basedir}/../ui/src/webapp/WEB-INF</directory>
<includes>
<include>spring-*.xml</include>
</includes>
</testResource>
<testResource>
<directory>src/webapp/WEB-INF</directory>
<includes>
<include>spring-*.xml</include>
</includes>
</testResource>
<testResource>
<directory>src/test</directory>
<includes>
<include>spring-*.xml</include>
<include>hibernate.properties</include>
</includes>
</testResource>
<testResource>
<directory>src/resource</directory>
</testResource>
</testResources>
</build>
</project>
| {
"content_hash": "8f9d8797e3b4e6ac68d7ff7526b71306",
"timestamp": "",
"source": "github",
"line_count": 359,
"max_line_length": 201,
"avg_line_length": 32.50974930362117,
"alnum_prop": 0.6411618541684517,
"repo_name": "marktriggs/nyu-sakai-10.4",
"id": "92510e409d7621664a783aaf96adc0c49a0311d6",
"size": "11671",
"binary": false,
"copies": "2",
"ref": "refs/heads/scormcloud",
"path": "gradebook/app/standalone-app/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "59098"
},
{
"name": "ApacheConf",
"bytes": "15"
},
{
"name": "AspectJ",
"bytes": "6831"
},
{
"name": "Batchfile",
"bytes": "5172"
},
{
"name": "C++",
"bytes": "477"
},
{
"name": "CSS",
"bytes": "2952622"
},
{
"name": "ColdFusion",
"bytes": "146057"
},
{
"name": "HTML",
"bytes": "6183794"
},
{
"name": "Handlebars",
"bytes": "24740"
},
{
"name": "Java",
"bytes": "42729208"
},
{
"name": "JavaScript",
"bytes": "7061288"
},
{
"name": "Lasso",
"bytes": "26436"
},
{
"name": "PHP",
"bytes": "222029"
},
{
"name": "PLSQL",
"bytes": "2161361"
},
{
"name": "Perl",
"bytes": "72300"
},
{
"name": "Python",
"bytes": "86996"
},
{
"name": "Ruby",
"bytes": "26953"
},
{
"name": "SQLPL",
"bytes": "862"
},
{
"name": "Shell",
"bytes": "17279"
},
{
"name": "SourcePawn",
"bytes": "1220"
},
{
"name": "XSLT",
"bytes": "278954"
}
],
"symlink_target": ""
} |
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2015 The Bitcoin Core developers
// Copyright (c) 2015-2017 The Bitcoin Unlimited developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#ifdef HAVE_CONFIG_H
#include "config/bitcoin-config.h"
#endif
#include "netbase.h"
#include "hash.h"
#include "sync.h"
#include "uint256.h"
#include "random.h"
#include "util.h"
#include "utilstrencodings.h"
#ifdef HAVE_GETADDRINFO_A
#include <netdb.h>
#endif
#ifndef WIN32
#if HAVE_INET_PTON
#include <arpa/inet.h>
#endif
#include <fcntl.h>
#endif
#include <boost/algorithm/string/case_conv.hpp> // for to_lower()
#include <boost/algorithm/string/predicate.hpp> // for startswith() and endswith()
#include <boost/thread.hpp>
#if !defined(HAVE_MSG_NOSIGNAL) && !defined(MSG_NOSIGNAL)
#define MSG_NOSIGNAL 0
#endif
// Settings
// BU move to globals.cpp
extern proxyType proxyInfo[NET_MAX]; // per-network proxy configuration (presumably guarded by cs_proxyInfos — confirm at the accessors)
extern proxyType nameProxy;          // proxy used for name lookups
extern CCriticalSection cs_proxyInfos;
int nConnectTimeout = DEFAULT_CONNECT_TIMEOUT; // socket connect timeout
bool fNameLookup = DEFAULT_NAME_LOOKUP;        // whether DNS lookups are permitted
// Byte prefix of an IPv4-mapped IPv6 address (::ffff:0:0/96).
static const unsigned char pchIPv4[12] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff };
// Need ample time for negotiation for very slow proxies such as Tor (milliseconds)
static const int SOCKS5_RECV_TIMEOUT = 20 * 1000;
/** Parse a network name ("ipv4", "ipv6", "tor"/"onion") case-insensitively. */
enum Network ParseNetwork(std::string net) {
    // Normalize so matching is case-insensitive.
    boost::to_lower(net);
    if (net == "ipv4")
        return NET_IPV4;
    else if (net == "ipv6")
        return NET_IPV6;
    else if (net == "tor" || net == "onion")
        // Both spellings refer to the Tor onion network.
        return NET_TOR;
    // Anything else is not a recognized, routable network name.
    return NET_UNROUTABLE;
}
/** Inverse of ParseNetwork: canonical name for a network ("" when it has none). */
std::string GetNetworkName(enum Network net) {
    if (net == NET_IPV4)
        return "ipv4";
    if (net == NET_IPV6)
        return "ipv6";
    if (net == NET_TOR)
        // Reported as "onion", one of the aliases ParseNetwork accepts.
        return "onion";
    return "";
}
/**
 * Split a host:port string into its components.
 *
 * @param in       input such as "example.com:8333", "[::1]:8333" or "example.com"
 * @param portOut  set to the parsed port when a valid one (1..65535) follows the
 *                 final colon; otherwise left untouched (callers pre-load a default)
 * @param hostOut  the host part, with any enclosing '[' ']' brackets stripped
 */
void SplitHostPort(std::string in, int &portOut, std::string &hostOut) {
    size_t colon = in.find_last_of(':');
    // if a : is found, and it either follows a [...], or no other : is in the string, treat it as port separator
    bool fHaveColon = colon != in.npos;
    bool fBracketed = fHaveColon && (in[0]=='[' && in[colon-1]==']'); // if there is a colon, and in[0]=='[', colon is not 0, so in[colon-1] is safe
    bool fMultiColon = fHaveColon && (in.find_last_of(':',colon-1) != in.npos);
    if (fHaveColon && (colon==0 || fBracketed || !fMultiColon)) {
        int32_t n;
        // Only accept numeric ports in the valid TCP range (1..65535).
        if (ParseInt32(in.substr(colon + 1), &n) && n > 0 && n < 0x10000) {
            in = in.substr(0, colon);
            portOut = n;
        }
    }
    // Strip enclosing brackets from IPv6 literals such as "[::1]".
    if (in.size()>0 && in[0] == '[' && in[in.size()-1] == ']')
        hostOut = in.substr(1, in.size()-2);
    else
        hostOut = in;
}
/**
 * Core name-resolution helper. Fills vIP with the addresses pszName resolves
 * to, trying in order: special addresses (via CNetAddr::SetSpecial), numeric
 * IPv4/IPv6 literals, and finally getaddrinfo (asynchronously via
 * getaddrinfo_a where available, so the thread stays interruptible).
 *
 * @param pszName        host name or numeric address to resolve
 * @param vIP            out: resolved addresses (cleared first)
 * @param nMaxSolutions  cap on results; 0 means unlimited
 * @param fAllowLookup   allow an actual DNS lookup (otherwise numeric only)
 * @return true if at least one address was found
 */
bool static LookupIntern(const char *pszName, std::vector<CNetAddr>& vIP, unsigned int nMaxSolutions, bool fAllowLookup)
{
    vIP.clear();
    {
        // Special-case addresses (e.g. handled entirely by SetSpecial) short-circuit resolution.
        CNetAddr addr;
        if (addr.SetSpecial(std::string(pszName))) {
            vIP.push_back(addr);
            return true;
        }
    }

#ifdef HAVE_GETADDRINFO_A
    // Fast path: parse numeric literals without ever touching the resolver.
    struct in_addr ipv4_addr;
#ifdef HAVE_INET_PTON
    if (inet_pton(AF_INET, pszName, &ipv4_addr) > 0) {
        vIP.push_back(CNetAddr(ipv4_addr));
        return true;
    }

    struct in6_addr ipv6_addr;
    if (inet_pton(AF_INET6, pszName, &ipv6_addr) > 0) {
        vIP.push_back(CNetAddr(ipv6_addr));
        return true;
    }
#else
    // inet_addr fallback handles IPv4 only (INADDR_NONE signals a parse failure).
    ipv4_addr.s_addr = inet_addr(pszName);
    if (ipv4_addr.s_addr != INADDR_NONE) {
        vIP.push_back(CNetAddr(ipv4_addr));
        return true;
    }
#endif
#endif

    struct addrinfo aiHint;
    memset(&aiHint, 0, sizeof(struct addrinfo));

    aiHint.ai_socktype = SOCK_STREAM;
    aiHint.ai_protocol = IPPROTO_TCP;
    aiHint.ai_family = AF_UNSPEC;
#ifdef WIN32
    aiHint.ai_flags = fAllowLookup ? 0 : AI_NUMERICHOST;
#else
    aiHint.ai_flags = fAllowLookup ? AI_ADDRCONFIG : AI_NUMERICHOST;
#endif

    struct addrinfo *aiRes = NULL;
#ifdef HAVE_GETADDRINFO_A
    // Asynchronous resolution: poll with a short timeout so boost thread
    // interruption requests are honored while the lookup is in flight.
    struct gaicb gcb, *query = &gcb;
    memset(query, 0, sizeof(struct gaicb));
    gcb.ar_name = pszName;
    gcb.ar_request = &aiHint;
    int nErr = getaddrinfo_a(GAI_NOWAIT, &query, 1, NULL);
    if (nErr)
        return false;

    do {
        // Should set the timeout limit to a reasonable value to avoid
        // generating unnecessary checking call during the polling loop,
        // while it can still response to stop request quick enough.
        // 2 seconds looks fine in our situation.
        struct timespec ts = { 2, 0 };
        gai_suspend(&query, 1, &ts);
        boost::this_thread::interruption_point();

        nErr = gai_error(query);
        if (0 == nErr)
            aiRes = query->ar_result;
    } while (nErr == EAI_INPROGRESS);
#else
    int nErr = getaddrinfo(pszName, NULL, &aiHint, &aiRes);
#endif
    if (nErr)
        return false;

    // Walk the result list, collecting up to nMaxSolutions addresses (0 = all).
    struct addrinfo *aiTrav = aiRes;
    while (aiTrav != NULL && (nMaxSolutions == 0 || vIP.size() < nMaxSolutions))
    {
        if (aiTrav->ai_family == AF_INET)
        {
            assert(aiTrav->ai_addrlen >= sizeof(sockaddr_in));
            vIP.push_back(CNetAddr(((struct sockaddr_in*)(aiTrav->ai_addr))->sin_addr));
        }

        if (aiTrav->ai_family == AF_INET6)
        {
            assert(aiTrav->ai_addrlen >= sizeof(sockaddr_in6));
            struct sockaddr_in6* s6 = (struct sockaddr_in6*) aiTrav->ai_addr;
            vIP.push_back(CNetAddr(s6->sin6_addr, s6->sin6_scope_id));
        }

        aiTrav = aiTrav->ai_next;
    }

    freeaddrinfo(aiRes);

    return (vIP.size() > 0);
}
/**
 * Resolve a host name (no port) into addresses. Bracketed IPv6 literals
 * such as "[::1]" are accepted; the brackets are stripped before lookup.
 */
bool LookupHost(const char *pszName, std::vector<CNetAddr>& vIP, unsigned int nMaxSolutions, bool fAllowLookup)
{
    std::string strHost(pszName);
    if (strHost.empty())
        return false;

    const bool fBracketed =
        boost::algorithm::starts_with(strHost, "[") && boost::algorithm::ends_with(strHost, "]");
    if (fBracketed)
        strHost = strHost.substr(1, strHost.size() - 2);

    return LookupIntern(strHost.c_str(), vIP, nMaxSolutions, fAllowLookup);
}
bool Lookup(const char *pszName, std::vector<CService>& vAddr, int portDefault, unsigned int nMaxSolutions, bool fAllowLookup)
{
if (pszName[0] == 0)
return false;
int port = portDefault;
std::string hostname = "";
SplitHostPort(std::string(pszName), port, hostname);
std::vector<CNetAddr> vIP;
bool fRet = LookupIntern(hostname.c_str(), vIP, nMaxSolutions, fAllowLookup);
if (!fRet)
return false;
vAddr.resize(vIP.size());
for (unsigned int i = 0; i < vIP.size(); i++)
vAddr[i] = CService(vIP[i], port);
return true;
}
/** Resolve a "host:port" string into a single CService (the first result). */
bool Lookup(const char *pszName, CService& addr, int portDefault, bool fAllowLookup)
{
    // Delegate to the multi-result overload, capped at one solution.
    std::vector<CService> vServices;
    if (!Lookup(pszName, vServices, portDefault, 1, fAllowLookup))
        return false;
    addr = vServices[0];
    return true;
}
/**
 * Resolve a numeric "host:port" string into a CService without performing a
 * DNS lookup (fAllowLookup is passed as false).
 */
bool LookupNumeric(const char *pszName, CService& addr, int portDefault)
{
    return Lookup(pszName, addr, portDefault, false);
}
/** Convert a millisecond count into the timeval structure used by select(). */
struct timeval MillisToTimeval(int64_t nTimeout)
{
    struct timeval tv;
    tv.tv_sec = nTimeout / 1000;           // whole seconds
    tv.tv_usec = (nTimeout % 1000) * 1000; // remainder, expressed in microseconds
    return tv;
}
/**
 * Read bytes from socket. This will either read the full number of bytes requested
 * or return False on error or timeout.
 * This function can be interrupted by boost thread interrupt.
 *
 * @param data Buffer to receive into
 * @param len Length of data to receive
 * @param timeout Timeout in milliseconds for receive operation
 * @param hSocket Socket to read from; must already be in non-blocking mode
 *
 * @note This function requires that hSocket is in non-blocking mode.
 */
bool static InterruptibleRecv(char* data, size_t len, int timeout, SOCKET& hSocket)
{
    int64_t curTime = GetTimeMillis();
    int64_t endTime = curTime + timeout;
    // Maximum time to wait in one select call. It will take up until this time (in millis)
    // to break off in case of an interruption.
    const int64_t maxWait = 1000;
    while (len > 0 && curTime < endTime) {
        ssize_t ret = recv(hSocket, data, len, 0); // Optimistically try the recv first
        if (ret > 0) {
            // Partial read: advance the cursor and keep looping for the rest.
            len -= ret;
            data += ret;
        } else if (ret == 0) { // Unexpected disconnection
            return false;
        } else { // Other error or blocking
            int nErr = WSAGetLastError();
            if (nErr == WSAEINPROGRESS || nErr == WSAEWOULDBLOCK || nErr == WSAEINVAL) {
                if (!IsSelectableSocket(hSocket)) {
                    return false;
                }
                // No data yet: wait (at most maxWait ms) for the socket to
                // become readable, so the interruption point below still
                // runs at least once per second.
                struct timeval tval = MillisToTimeval(std::min(endTime - curTime, maxWait));
                fd_set fdset;
                FD_ZERO(&fdset);
                FD_SET(hSocket, &fdset);
                int nRet = select(hSocket + 1, &fdset, NULL, NULL, &tval);
                if (nRet == SOCKET_ERROR) {
                    return false;
                }
            } else {
                return false;
            }
        }
        boost::this_thread::interruption_point();
        curTime = GetTimeMillis();
    }
    // True only when the full request was satisfied before the deadline.
    return len == 0;
}
/** Credentials for SOCKS5 username/password authentication (RFC 1929). */
struct ProxyCredentials
{
    std::string username;
    std::string password;
};
/** Translate a SOCKS5 reply code (the REP field of the server reply) into a readable message. */
std::string Socks5ErrorString(int err)
{
    if (err == 0x01) return "general failure";
    if (err == 0x02) return "connection not allowed";
    if (err == 0x03) return "network unreachable";
    if (err == 0x04) return "host unreachable";
    if (err == 0x05) return "connection refused";
    if (err == 0x06) return "TTL expired";
    if (err == 0x07) return "protocol error";
    if (err == 0x08) return "address type not supported";
    return "unknown";
}
/**
 * Connect using SOCKS5 (as described in RFC1928).
 *
 * Performs method negotiation, optional username/password authentication
 * (RFC1929) when `auth` is non-NULL, then a CONNECT request for strDest:port.
 * On every failure path the socket is closed before returning false.
 *
 * @param strDest  destination host name (at most 255 bytes, per the protocol)
 * @param port     destination port
 * @param auth     optional credentials; NULL selects "no authentication"
 * @param hSocket  an already-connected socket to the proxy
 */
static bool Socks5(const std::string& strDest, int port, const ProxyCredentials *auth, SOCKET& hSocket)
{
    LogPrint("net", "SOCKS5 connecting %s\n", strDest);
    if (strDest.size() > 255) {
        CloseSocket(hSocket);
        return error("Hostname too long");
    }
    // Accepted authentication methods
    std::vector<uint8_t> vSocks5Init;
    vSocks5Init.push_back(0x05);
    if (auth) {
        vSocks5Init.push_back(0x02); // # METHODS
        vSocks5Init.push_back(0x00); // X'00' NO AUTHENTICATION REQUIRED
        vSocks5Init.push_back(0x02); // X'02' USERNAME/PASSWORD (RFC1929)
    } else {
        vSocks5Init.push_back(0x01); // # METHODS
        vSocks5Init.push_back(0x00); // X'00' NO AUTHENTICATION REQUIRED
    }
    ssize_t ret = send(hSocket, (const char*)begin_ptr(vSocks5Init), vSocks5Init.size(), MSG_NOSIGNAL);
    if (ret != (ssize_t)vSocks5Init.size()) {
        CloseSocket(hSocket);
        return error("Error sending to proxy");
    }
    char pchRet1[2];
    if (!InterruptibleRecv(pchRet1, 2, SOCKS5_RECV_TIMEOUT, hSocket)) {
        CloseSocket(hSocket);
        LogPrintf("Socks5() connect to %s:%d failed: InterruptibleRecv() timeout or other failure\n", strDest, port);
        return false;
    }
    if (pchRet1[0] != 0x05) {
        CloseSocket(hSocket);
        return error("Proxy failed to initialize");
    }
    if (pchRet1[1] == 0x02 && auth) {
        // Perform username/password authentication (as described in RFC1929)
        std::vector<uint8_t> vAuth;
        vAuth.push_back(0x01);
        if (auth->username.size() > 255 || auth->password.size() > 255)
        {
            CloseSocket(hSocket);
            return error("Proxy username or password too long");
        }
        vAuth.push_back(auth->username.size());
        vAuth.insert(vAuth.end(), auth->username.begin(), auth->username.end());
        vAuth.push_back(auth->password.size());
        vAuth.insert(vAuth.end(), auth->password.begin(), auth->password.end());
        ret = send(hSocket, (const char*)begin_ptr(vAuth), vAuth.size(), MSG_NOSIGNAL);
        if (ret != (ssize_t)vAuth.size()) {
            CloseSocket(hSocket);
            return error("Error sending authentication to proxy");
        }
        LogPrint("proxy", "SOCKS5 sending proxy authentication %s:%s\n", auth->username, auth->password);
        char pchRetA[2];
        if (!InterruptibleRecv(pchRetA, 2, SOCKS5_RECV_TIMEOUT, hSocket)) {
            CloseSocket(hSocket);
            return error("Error reading proxy authentication response");
        }
        if (pchRetA[0] != 0x01 || pchRetA[1] != 0x00) {
            CloseSocket(hSocket);
            return error("Proxy authentication unsuccessful");
        }
    } else if (pchRet1[1] == 0x00) {
        // Perform no authentication
    } else {
        CloseSocket(hSocket);
        return error("Proxy requested wrong authentication method %02x", pchRet1[1]);
    }
    // Build the CONNECT request.
    std::vector<uint8_t> vSocks5;
    vSocks5.push_back(0x05); // VER protocol version
    vSocks5.push_back(0x01); // CMD CONNECT
    vSocks5.push_back(0x00); // RSV Reserved
    vSocks5.push_back(0x03); // ATYP DOMAINNAME
    vSocks5.push_back(strDest.size()); // Length<=255 is checked at beginning of function
    vSocks5.insert(vSocks5.end(), strDest.begin(), strDest.end());
    vSocks5.push_back((port >> 8) & 0xFF);
    vSocks5.push_back((port >> 0) & 0xFF);
    ret = send(hSocket, (const char*)begin_ptr(vSocks5), vSocks5.size(), MSG_NOSIGNAL);
    if (ret != (ssize_t)vSocks5.size()) {
        CloseSocket(hSocket);
        return error("Error sending to proxy");
    }
    char pchRet2[4];
    if (!InterruptibleRecv(pchRet2, 4, SOCKS5_RECV_TIMEOUT, hSocket)) {
        CloseSocket(hSocket);
        return error("Error reading proxy response");
    }
    if (pchRet2[0] != 0x05) {
        CloseSocket(hSocket);
        return error("Proxy failed to accept request");
    }
    if (pchRet2[1] != 0x00) {
        // Failures to connect to a peer that are not proxy errors
        CloseSocket(hSocket);
        LogPrintf("Socks5() connect to %s:%d failed: %s\n", strDest, port, Socks5ErrorString(pchRet2[1]));
        return false;
    }
    if (pchRet2[2] != 0x00) {
        CloseSocket(hSocket);
        return error("Error: malformed proxy response");
    }
    // Skip over the bound address (BND.ADDR) in the reply; its length depends
    // on the ATYP byte.
    char pchRet3[256];
    switch (pchRet2[3])
    {
        case 0x01: ret = InterruptibleRecv(pchRet3, 4, SOCKS5_RECV_TIMEOUT, hSocket); break;
        case 0x04: ret = InterruptibleRecv(pchRet3, 16, SOCKS5_RECV_TIMEOUT, hSocket); break;
        case 0x03:
        {
            ret = InterruptibleRecv(pchRet3, 1, SOCKS5_RECV_TIMEOUT, hSocket);
            if (!ret) {
                CloseSocket(hSocket);
                return error("Error reading from proxy");
            }
            // Bug fix: read the domain-name length as unsigned. `char` may be
            // signed, so a length > 127 previously became negative and was
            // converted to a huge size_t when passed to InterruptibleRecv,
            // requesting far more than the 256-byte buffer holds.
            int nRecv = (uint8_t)pchRet3[0];
            ret = InterruptibleRecv(pchRet3, nRecv, SOCKS5_RECV_TIMEOUT, hSocket);
            break;
        }
        default: CloseSocket(hSocket); return error("Error: malformed proxy response");
    }
    if (!ret) {
        CloseSocket(hSocket);
        return error("Error reading from proxy");
    }
    // Skip the 2-byte bound port (BND.PORT); the value is not used.
    if (!InterruptibleRecv(pchRet3, 2, SOCKS5_RECV_TIMEOUT, hSocket)) {
        CloseSocket(hSocket);
        return error("Error reading from proxy");
    }
    LogPrint("net", "SOCKS5 connected %s\n", strDest);
    return true;
}
/**
 * Open a direct (non-proxied) TCP connection to addrConnect.
 *
 * The socket is created non-blocking; connect() completion is awaited via
 * select() for at most nTimeout milliseconds.  On success the connected,
 * still non-blocking socket is returned through hSocketRet.  On any failure
 * the socket is closed and false is returned.
 */
bool static ConnectSocketDirectly(const CService &addrConnect, SOCKET& hSocketRet, int nTimeout)
{
    hSocketRet = INVALID_SOCKET;
    struct sockaddr_storage sockaddr;
    socklen_t len = sizeof(sockaddr);
    if (!addrConnect.GetSockAddr((struct sockaddr*)&sockaddr, &len)) {
        LogPrintf("Cannot connect to %s: unsupported network\n", addrConnect.ToString());
        return false;
    }
    SOCKET hSocket = socket(((struct sockaddr*)&sockaddr)->sa_family, SOCK_STREAM, IPPROTO_TCP);
    if (hSocket == INVALID_SOCKET)
        return false;
    int set = 1;
#ifdef SO_NOSIGPIPE
    // Different way of disabling SIGPIPE on BSD
    setsockopt(hSocket, SOL_SOCKET, SO_NOSIGPIPE, (void*)&set, sizeof(int));
#endif
    //Disable Nagle's algorithm
#ifdef WIN32
    setsockopt(hSocket, IPPROTO_TCP, TCP_NODELAY, (const char*)&set, sizeof(int));
#else
    setsockopt(hSocket, IPPROTO_TCP, TCP_NODELAY, (void*)&set, sizeof(int));
#endif
    // Set to non-blocking
    // (SetSocketNonBlocking closes the socket itself on failure, so no
    // leak on this early return.)
    if (!SetSocketNonBlocking(hSocket, true))
        return error("ConnectSocketDirectly: Setting socket to non-blocking failed, error %s\n", NetworkErrorString(WSAGetLastError()));
    if (connect(hSocket, (struct sockaddr*)&sockaddr, len) == SOCKET_ERROR)
    {
        int nErr = WSAGetLastError();
        // WSAEINVAL is here because some legacy version of winsock uses it
        if (nErr == WSAEINPROGRESS || nErr == WSAEWOULDBLOCK || nErr == WSAEINVAL)
        {
            // Connection attempt is in progress: wait until the socket
            // becomes writable (connected) or the timeout expires.
            struct timeval timeout = MillisToTimeval(nTimeout);
            fd_set fdset;
            FD_ZERO(&fdset);
            FD_SET(hSocket, &fdset);
            int nRet = select(hSocket + 1, NULL, &fdset, NULL, &timeout);
            if (nRet == 0)
            {
                LogPrint("net", "connection to %s timeout\n", addrConnect.ToString());
                CloseSocket(hSocket);
                return false;
            }
            if (nRet == SOCKET_ERROR)
            {
                LogPrintf("select() for %s failed: %s\n", addrConnect.ToString(), NetworkErrorString(WSAGetLastError()));
                CloseSocket(hSocket);
                return false;
            }
            // Writability alone does not prove success: fetch the final
            // connect() result with SO_ERROR.
            socklen_t nRetSize = sizeof(nRet);
#ifdef WIN32
            if (getsockopt(hSocket, SOL_SOCKET, SO_ERROR, (char*)(&nRet), &nRetSize) == SOCKET_ERROR)
#else
            if (getsockopt(hSocket, SOL_SOCKET, SO_ERROR, &nRet, &nRetSize) == SOCKET_ERROR)
#endif
            {
                LogPrintf("getsockopt() for %s failed: %s\n", addrConnect.ToString(), NetworkErrorString(WSAGetLastError()));
                CloseSocket(hSocket);
                return false;
            }
            if (nRet != 0)
            {
                LogPrintf("connect() to %s failed after select(): %s\n", addrConnect.ToString(), NetworkErrorString(nRet));
                CloseSocket(hSocket);
                return false;
            }
        }
#ifdef WIN32
        else if (WSAGetLastError() != WSAEISCONN)
#else
        else
#endif
        {
            LogPrintf("connect() to %s failed: %s\n", addrConnect.ToString(), NetworkErrorString(WSAGetLastError()));
            CloseSocket(hSocket);
            return false;
        }
    }
    hSocketRet = hSocket;
    return true;
}
/**
 * Install addrProxy as the proxy for the given network.
 * Leaves the previous setting untouched and returns false when the
 * supplied proxy is invalid.  Access is serialized by cs_proxyInfos.
 */
bool SetProxy(enum Network net, const proxyType &addrProxy) {
    assert(net >= 0 && net < NET_MAX);
    if (addrProxy.IsValid()) {
        LOCK(cs_proxyInfos);
        proxyInfo[net] = addrProxy;
        return true;
    }
    return false;
}
bool GetProxy(enum Network net, proxyType &proxyInfoOut) {
assert(net >= 0 && net < NET_MAX);
LOCK(cs_proxyInfos);
if (!proxyInfo[net].IsValid())
return false;
proxyInfoOut = proxyInfo[net];
return true;
}
/**
 * Install addrProxy as the name proxy, used to connect to hostnames
 * without resolving them locally (typically a Tor SOCKS5 listener).
 * Fails on an invalid proxy.
 */
bool SetNameProxy(const proxyType &addrProxy) {
    if (!addrProxy.IsValid())
        return false;
    LOCK(cs_proxyInfos);
    nameProxy = addrProxy;
    return true;
}
/** Copy the configured name proxy into nameProxyOut; false when unset/invalid. */
bool GetNameProxy(proxyType &nameProxyOut) {
    LOCK(cs_proxyInfos);
    if(!nameProxy.IsValid())
        return false;
    nameProxyOut = nameProxy;
    return true;
}
/** Whether a valid name proxy is currently configured. */
bool HaveNameProxy() {
    LOCK(cs_proxyInfos);
    return nameProxy.IsValid();
}
/**
 * Whether addr is the address of any configured per-network proxy.
 * Only the address part is compared; the proxy's port is ignored.
 */
bool IsProxy(const CNetAddr &addr) {
    LOCK(cs_proxyInfos);
    for (int i = 0; i < NET_MAX; i++) {
        if (addr == (CNetAddr)proxyInfo[i].proxy)
            return true;
    }
    return false;
}
/**
 * Connect to strDest:port through the given SOCKS5 proxy.
 *
 * First opens a plain TCP connection to the proxy itself; on failure
 * *outProxyConnectionFailed (if non-NULL) is set so callers can tell
 * "proxy is down" apart from "destination unreachable".  Then the SOCKS5
 * handshake is performed.  When randomize_credentials is set, throw-away
 * credentials are generated per connection (used for Tor stream isolation).
 * NOTE(review): Socks5() appears to close the socket on its own error
 * paths, so no cleanup is done here on handshake failure -- confirm.
 */
static bool ConnectThroughProxy(const proxyType &proxy, const std::string& strDest, int port, SOCKET& hSocketRet, int nTimeout, bool *outProxyConnectionFailed)
{
    SOCKET hSocket = INVALID_SOCKET;
    // first connect to proxy server
    if (!ConnectSocketDirectly(proxy.proxy, hSocket, nTimeout)) {
        if (outProxyConnectionFailed)
            *outProxyConnectionFailed = true;
        return false;
    }
    // do socks negotiation
    if (proxy.randomize_credentials) {
        ProxyCredentials random_auth;
        random_auth.username = strprintf("%i", insecure_rand());
        random_auth.password = strprintf("%i", insecure_rand());
        if (!Socks5(strDest, (unsigned short)port, &random_auth, hSocket))
            return false;
    } else {
        if (!Socks5(strDest, (unsigned short)port, 0, hSocket))
            return false;
    }
    hSocketRet = hSocket;
    return true;
}
/**
 * Connect to addrDest, transparently routing through the proxy configured
 * for the destination's network when one exists; otherwise connect
 * directly.  outProxyConnectionFailed (optional) is cleared up front and
 * set by the proxy path when the proxy itself is unreachable.
 */
bool ConnectSocket(const CService &addrDest, SOCKET& hSocketRet, int nTimeout, bool *outProxyConnectionFailed)
{
    if (outProxyConnectionFailed)
        *outProxyConnectionFailed = false;
    proxyType proxy;
    if (!GetProxy(addrDest.GetNetwork(), proxy)) {
        // No proxy configured for the target network: plain TCP connect.
        return ConnectSocketDirectly(addrDest, hSocketRet, nTimeout);
    }
    return ConnectThroughProxy(proxy, addrDest.ToStringIP(), addrDest.GetPort(), hSocketRet, nTimeout, outProxyConnectionFailed);
}
/**
 * Connect to a destination given as "host[:port]".
 *
 * When no name proxy is configured the host is resolved locally (subject
 * to fNameLookup) and connected via ConnectSocket().  Otherwise the
 * unresolved hostname is handed to the SOCKS5 name proxy, so no local DNS
 * lookup reveals the destination.  addr receives the resolved address, or
 * 0.0.0.0:0 when resolution was skipped or failed.
 */
bool ConnectSocketByName(CService &addr, SOCKET& hSocketRet, const char *pszDest, int portDefault, int nTimeout, bool *outProxyConnectionFailed)
{
    std::string strDest;
    int port = portDefault;
    if (outProxyConnectionFailed)
        *outProxyConnectionFailed = false;
    SplitHostPort(std::string(pszDest), port, strDest);
    // Local variable deliberately shadows the file-level nameProxy.
    proxyType nameProxy;
    GetNameProxy(nameProxy);
    CService addrResolved;
    if (Lookup(strDest.c_str(), addrResolved, port, fNameLookup && !HaveNameProxy()))
    {
        if (addrResolved.IsValid())
        {
            addr = addrResolved;
            // NOTE(review): outProxyConnectionFailed is not forwarded on
            // this path; a per-network proxy failure is reported as a
            // plain failure -- confirm this is intended.
            return ConnectSocket(addr, hSocketRet, nTimeout);
        }
    }
    addr = CService("0.0.0.0:0");
    if (!HaveNameProxy())
        return false;
    return ConnectThroughProxy(nameProxy, strDest, port, hSocketRet, nTimeout, outProxyConnectionFailed);
}
/** Zero the 16 address bytes (unspecified address ::) and the scope id. */
void CNetAddr::Init()
{
    memset(ip, 0, sizeof(ip));
    scopeId = 0;
}
/** Copy the raw address bytes from another CNetAddr (scope id untouched). */
void CNetAddr::SetIP(const CNetAddr& ipIn)
{
    memcpy(ip, ipIn.ip, sizeof(ip));
}
/**
 * Set the address from raw bytes: 4 bytes for NET_IPV4 (stored as an
 * IPv4-mapped IPv6 address, ::ffff:a.b.c.d) or 16 bytes for NET_IPV6.
 * Aborts on any other network value.
 */
void CNetAddr::SetRaw(Network network, const uint8_t *ip_in)
{
    switch(network)
    {
        case NET_IPV4:
            // pchIPv4 is the 12-byte mapped-IPv4 (::ffff:) prefix.
            memcpy(ip, pchIPv4, 12);
            memcpy(ip+12, ip_in, 4);
            break;
        case NET_IPV6:
            memcpy(ip, ip_in, 16);
            break;
        default:
            assert(!"invalid network");
    }
}
// OnionCat prefix fd87:d87e:eb43::/48, used to embed Tor onion ids in IPv6.
static const unsigned char pchOnionCat[] = {0xFD,0x87,0xD8,0x7E,0xEB,0x43};
/**
 * Parse a "<base32>.onion" name into the internal OnionCat encoding:
 * the 6-byte OnionCat prefix followed by the 10-byte (80-bit) onion id.
 * Returns false for anything that does not decode to exactly 10 bytes.
 */
bool CNetAddr::SetSpecial(const std::string &strName)
{
    if (strName.size()>6 && strName.substr(strName.size() - 6, 6) == ".onion") {
        std::vector<unsigned char> vchAddr = DecodeBase32(strName.substr(0, strName.size() - 6).c_str());
        if (vchAddr.size() != 16-sizeof(pchOnionCat))
            return false;
        memcpy(ip, pchOnionCat, sizeof(pchOnionCat));
        for (unsigned int i=0; i<16-sizeof(pchOnionCat); i++)
            ip[i + sizeof(pchOnionCat)] = vchAddr[i];
        return true;
    }
    return false;
}
CNetAddr::CNetAddr()
{
    Init();
}
CNetAddr::CNetAddr(const struct in_addr& ipv4Addr)
{
    SetRaw(NET_IPV4, (const uint8_t*)&ipv4Addr);
}
CNetAddr::CNetAddr(const struct in6_addr& ipv6Addr, const uint32_t scope)
{
    SetRaw(NET_IPV6, (const uint8_t*)&ipv6Addr);
    scopeId = scope;
}
// String constructors parse numeric addresses only (fAllowLookup=false,
// so no DNS); on failure the address stays unspecified/invalid.
CNetAddr::CNetAddr(const char *pszIp)
{
    Init();
    std::vector<CNetAddr> vIP;
    if (LookupHost(pszIp, vIP, 1, false))
        *this = vIP[0];
}
CNetAddr::CNetAddr(const std::string &strIp)
{
    Init();
    std::vector<CNetAddr> vIP;
    if (LookupHost(strIp.c_str(), vIP, 1, false))
        *this = vIP[0];
}
/**
 * Byte accessor in reversed order: GetByte(0) is the LAST of the 16
 * address bytes, so for IPv4 a.b.c.d, GetByte(3)..GetByte(0) = a,b,c,d.
 */
unsigned int CNetAddr::GetByte(int n) const
{
    return ip[15-n];
}
/** IPv4-mapped address (pchIPv4 prefix, ::ffff:0:0/96). */
bool CNetAddr::IsIPv4() const
{
    return (memcmp(ip, pchIPv4, sizeof(pchIPv4)) == 0);
}
/** "IPv6" here means: neither IPv4-mapped nor an OnionCat Tor address. */
bool CNetAddr::IsIPv6() const
{
    return (!IsIPv4() && !IsTor());
}
/** RFC 1918 private IPv4: 10/8, 192.168/16, 172.16/12. */
bool CNetAddr::IsRFC1918() const
{
    return IsIPv4() && (
        GetByte(3) == 10 ||
        (GetByte(3) == 192 && GetByte(2) == 168) ||
        (GetByte(3) == 172 && (GetByte(2) >= 16 && GetByte(2) <= 31)));
}
/** RFC 2544 benchmarking range: 198.18.0.0/15. */
bool CNetAddr::IsRFC2544() const
{
    return IsIPv4() && GetByte(3) == 198 && (GetByte(2) == 18 || GetByte(2) == 19);
}
/** RFC 3927 IPv4 link-local: 169.254/16. */
bool CNetAddr::IsRFC3927() const
{
    return IsIPv4() && (GetByte(3) == 169 && GetByte(2) == 254);
}
/** RFC 6598 carrier-grade NAT: 100.64.0.0/10. */
bool CNetAddr::IsRFC6598() const
{
    return IsIPv4() && GetByte(3) == 100 && GetByte(2) >= 64 && GetByte(2) <= 127;
}
/** RFC 5737 documentation ranges: 192.0.2/24, 198.51.100/24, 203.0.113/24. */
bool CNetAddr::IsRFC5737() const
{
    return IsIPv4() && ((GetByte(3) == 192 && GetByte(2) == 0 && GetByte(1) == 2) ||
        (GetByte(3) == 198 && GetByte(2) == 51 && GetByte(1) == 100) ||
        (GetByte(3) == 203 && GetByte(2) == 0 && GetByte(1) == 113));
}
/** RFC 3849 IPv6 documentation prefix: 2001:db8::/32. */
bool CNetAddr::IsRFC3849() const
{
    return GetByte(15) == 0x20 && GetByte(14) == 0x01 && GetByte(13) == 0x0D && GetByte(12) == 0xB8;
}
/** RFC 3964 6to4 tunnelling: 2002::/16. */
bool CNetAddr::IsRFC3964() const
{
    return (GetByte(15) == 0x20 && GetByte(14) == 0x02);
}
/** RFC 6052 well-known IPv4/IPv6 translation prefix: 64:ff9b::/96. */
bool CNetAddr::IsRFC6052() const
{
    static const unsigned char pchRFC6052[] = {0,0x64,0xFF,0x9B,0,0,0,0,0,0,0,0};
    return (memcmp(ip, pchRFC6052, sizeof(pchRFC6052)) == 0);
}
/** RFC 4380 Teredo tunnelling: 2001::/32. */
bool CNetAddr::IsRFC4380() const
{
    return (GetByte(15) == 0x20 && GetByte(14) == 0x01 && GetByte(13) == 0 && GetByte(12) == 0);
}
/** RFC 4862 IPv6 link-local autoconfiguration: fe80::/64. */
bool CNetAddr::IsRFC4862() const
{
    static const unsigned char pchRFC4862[] = {0xFE,0x80,0,0,0,0,0,0};
    return (memcmp(ip, pchRFC4862, sizeof(pchRFC4862)) == 0);
}
/** RFC 4193 unique local IPv6: fc00::/7 (also covers the OnionCat range). */
bool CNetAddr::IsRFC4193() const
{
    return ((GetByte(15) & 0xFE) == 0xFC);
}
/** RFC 6145 IPv4-translated IPv6: ::ffff:0:0:0/96. */
bool CNetAddr::IsRFC6145() const
{
    static const unsigned char pchRFC6145[] = {0,0,0,0,0,0,0,0,0xFF,0xFF,0,0};
    return (memcmp(ip, pchRFC6145, sizeof(pchRFC6145)) == 0);
}
/** RFC 4843 ORCHID: 2001:10::/28. */
bool CNetAddr::IsRFC4843() const
{
    return (GetByte(15) == 0x20 && GetByte(14) == 0x01 && GetByte(13) == 0x00 && (GetByte(12) & 0xF0) == 0x10);
}
/** OnionCat-encoded Tor address (fd87:d87e:eb43::/48 prefix). */
bool CNetAddr::IsTor() const
{
    return (memcmp(ip, pchOnionCat, sizeof(pchOnionCat)) == 0);
}
/** Loopback or zero network: 127/8 or 0/8 (IPv4), ::1 (IPv6). */
bool CNetAddr::IsLocal() const
{
    // IPv4 loopback
    if (IsIPv4() && (GetByte(3) == 127 || GetByte(3) == 0))
        return true;
    // IPv6 loopback (::1/128)
    static const unsigned char pchLocal[16] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1};
    if (memcmp(ip, pchLocal, 16) == 0)
        return true;
    return false;
}
/** IPv4 224.0.0.0/4, or any IPv6 address whose first byte is 0xff. */
bool CNetAddr::IsMulticast() const
{
    return (IsIPv4() && (GetByte(3) & 0xF0) == 0xE0)
        || (GetByte(15) == 0xFF);
}
/**
 * Best-effort plausibility check applied before storing or relaying peer
 * addresses: rejects historically-garbled addresses, the unspecified
 * address, the documentation prefix, and IPv4 0.0.0.0 / 255.255.255.255.
 */
bool CNetAddr::IsValid() const
{
    // Cleanup 3-byte shifted addresses caused by garbage in size field
    // of addr messages from versions before 0.2.9 checksum.
    // Two consecutive addr messages look like this:
    // header20 vectorlen3 addr26 addr26 addr26 header20 vectorlen3 addr26 addr26 addr26...
    // so if the first length field is garbled, it reads the second batch
    // of addr misaligned by 3 bytes.
    if (memcmp(ip, pchIPv4+3, sizeof(pchIPv4)-3) == 0)
        return false;
    // unspecified IPv6 address (::/128)
    unsigned char ipNone[16] = {};
    if (memcmp(ip, ipNone, 16) == 0)
        return false;
    // documentation IPv6 address
    if (IsRFC3849())
        return false;
    if (IsIPv4())
    {
        // INADDR_NONE (255.255.255.255); this uint32_t ipNone
        // intentionally shadows the byte array above.
        uint32_t ipNone = INADDR_NONE;
        if (memcmp(ip+12, &ipNone, 4) == 0)
            return false;
        // 0.0.0.0
        ipNone = 0;
        if (memcmp(ip+12, &ipNone, 4) == 0)
            return false;
    }
    return true;
}
/**
 * Routable = valid and not in any reserved/private/local range.
 * Note RFC 4193 (unique local, fc00::/7) is allowed through only when the
 * address is an OnionCat Tor address.
 */
bool CNetAddr::IsRoutable() const
{
    return IsValid() && !(IsRFC1918() || IsRFC2544() || IsRFC3927() || IsRFC4862() || IsRFC6598() || IsRFC5737() || (IsRFC4193() && !IsTor()) || IsRFC4843() || IsLocal());
}
/** Classify the address; unroutable wins, then IPv4/Tor, default IPv6. */
enum Network CNetAddr::GetNetwork() const
{
    if (!IsRoutable())
        return NET_UNROUTABLE;
    if (IsIPv4())
        return NET_IPV4;
    if (IsTor())
        return NET_TOR;
    return NET_IPV6;
}
/**
 * Render the address part only: "<base32>.onion" for Tor, dotted-quad for
 * IPv4, colon-hex for IPv6.  getnameinfo() is tried first; the manual
 * formatting below is a fallback when that fails.
 */
std::string CNetAddr::ToStringIP() const
{
    if (IsTor())
        return EncodeBase32(&ip[6], 10) + ".onion";
    CService serv(*this, 0);
    struct sockaddr_storage sockaddr;
    socklen_t socklen = sizeof(sockaddr);
    if (serv.GetSockAddr((struct sockaddr*)&sockaddr, &socklen)) {
        char name[1025] = "";
        if (!getnameinfo((const struct sockaddr*)&sockaddr, socklen, name, sizeof(name), NULL, 0, NI_NUMERICHOST))
            return std::string(name);
    }
    if (IsIPv4())
        return strprintf("%u.%u.%u.%u", GetByte(3), GetByte(2), GetByte(1), GetByte(0));
    else
        return strprintf("%x:%x:%x:%x:%x:%x:%x:%x",
                         GetByte(15) << 8 | GetByte(14), GetByte(13) << 8 | GetByte(12),
                         GetByte(11) << 8 | GetByte(10), GetByte(9) << 8 | GetByte(8),
                         GetByte(7) << 8 | GetByte(6), GetByte(5) << 8 | GetByte(4),
                         GetByte(3) << 8 | GetByte(2), GetByte(1) << 8 | GetByte(0));
}
std::string CNetAddr::ToString() const
{
    return ToStringIP();
}
// CNetAddr comparisons operate on the raw 16 address bytes only; the
// scopeId field does not participate.
bool operator==(const CNetAddr& a, const CNetAddr& b)
{
    return (memcmp(a.ip, b.ip, 16) == 0);
}
bool operator!=(const CNetAddr& a, const CNetAddr& b)
{
    return (memcmp(a.ip, b.ip, 16) != 0);
}
bool operator<(const CNetAddr& a, const CNetAddr& b)
{
    return (memcmp(a.ip, b.ip, 16) < 0);
}
/** Extract the IPv4 address; false when this is not IPv4-mapped. */
bool CNetAddr::GetInAddr(struct in_addr* pipv4Addr) const
{
    if (!IsIPv4())
        return false;
    memcpy(pipv4Addr, ip+12, 4);
    return true;
}
/** Copy all 16 bytes as an in6_addr (IPv4 comes out mapped); always true. */
bool CNetAddr::GetIn6Addr(struct in6_addr* pipv6Addr) const
{
    memcpy(pipv6Addr, ip, 16);
    return true;
}
// get canonical identifier of an address' group
// no two connections will be attempted to addresses with the same group
/**
 * Map the address to a small byte vector identifying its "netgroup":
 * a class byte followed by enough prefix bits to bucket related hosts
 * (/16 for IPv4, /32 for most IPv6, first 4 bits of the onion id for
 * Tor).  Tunnelled/translated IPv6 forms are bucketed by their embedded
 * IPv4 address.
 */
std::vector<unsigned char> CNetAddr::GetGroup() const
{
    std::vector<unsigned char> vchRet;
    int nClass = NET_IPV6;
    int nStartByte = 0;
    int nBits = 16;
    // all local addresses belong to the same group
    if (IsLocal())
    {
        nClass = 255;
        nBits = 0;
    }
    // all unroutable addresses belong to the same group
    if (!IsRoutable())
    {
        nClass = NET_UNROUTABLE;
        nBits = 0;
    }
    // for IPv4 addresses, '1' + the 16 higher-order bits of the IP
    // includes mapped IPv4, SIIT translated IPv4, and the well-known prefix
    else if (IsIPv4() || IsRFC6145() || IsRFC6052())
    {
        nClass = NET_IPV4;
        nStartByte = 12;
    }
    // for 6to4 tunnelled addresses, use the encapsulated IPv4 address
    else if (IsRFC3964())
    {
        nClass = NET_IPV4;
        nStartByte = 2;
    }
    // for Teredo-tunnelled IPv6 addresses, use the encapsulated IPv4 address
    // (Teredo stores the server's IPv4 bitwise-inverted; undo the inversion)
    else if (IsRFC4380())
    {
        vchRet.push_back(NET_IPV4);
        vchRet.push_back(GetByte(3) ^ 0xFF);
        vchRet.push_back(GetByte(2) ^ 0xFF);
        return vchRet;
    }
    else if (IsTor())
    {
        nClass = NET_TOR;
        nStartByte = 6;
        nBits = 4;
    }
    // for he.net, use /36 groups
    else if (GetByte(15) == 0x20 && GetByte(14) == 0x01 && GetByte(13) == 0x04 && GetByte(12) == 0x70)
        nBits = 36;
    // for the rest of the IPv6 network, use /32 groups
    else
        nBits = 32;
    vchRet.push_back(nClass);
    // Emit the whole prefix bytes...
    while (nBits >= 8)
    {
        vchRet.push_back(GetByte(15 - nStartByte));
        nStartByte++;
        nBits -= 8;
    }
    // ...and for a trailing partial byte, force the irrelevant low bits to
    // 1 so every address within the group yields the same final byte.
    if (nBits > 0)
        vchRet.push_back(GetByte(15 - nStartByte) | ((1 << (8 - nBits)) - 1));
    return vchRet;
}
/** 64-bit id: the 256-bit hash of the 16 address bytes truncated to 8 bytes. */
uint64_t CNetAddr::GetHash() const
{
    uint256 hash = Hash(&ip[0], &ip[16]);
    uint64_t nRet;
    memcpy(&nRet, &hash, sizeof(nRet));
    return nRet;
}
// private extensions to enum Network, only returned by GetExtNetwork,
// and only used in GetReachabilityFrom
static const int NET_UNKNOWN = NET_MAX + 0;
static const int NET_TEREDO = NET_MAX + 1;
/** Like GetNetwork(), but distinguishes Teredo and a NULL/unknown peer. */
int static GetExtNetwork(const CNetAddr *addr)
{
    if (addr == NULL)
        return NET_UNKNOWN;
    if (addr->IsRFC4380())
        return NET_TEREDO;
    return addr->GetNetwork();
}
/** Calculates a metric for how reachable (*this) is from a given partner */
// Higher values are better; used when choosing which of our own local
// addresses to advertise to a given peer.
int CNetAddr::GetReachabilityFrom(const CNetAddr *paddrPartner) const
{
    enum Reachability {
        REACH_UNREACHABLE,
        REACH_DEFAULT,
        REACH_TEREDO,
        REACH_IPV6_WEAK,
        REACH_IPV4,
        REACH_IPV6_STRONG,
        REACH_PRIVATE
    };
    if (!IsRoutable())
        return REACH_UNREACHABLE;
    int ourNet = GetExtNetwork(this);
    int theirNet = GetExtNetwork(paddrPartner);
    bool fTunnel = IsRFC3964() || IsRFC6052() || IsRFC6145();
    switch(theirNet) {
    case NET_IPV4:
        switch(ourNet) {
        default: return REACH_DEFAULT;
        case NET_IPV4: return REACH_IPV4;
        }
    case NET_IPV6:
        switch(ourNet) {
        default: return REACH_DEFAULT;
        case NET_TEREDO: return REACH_TEREDO;
        case NET_IPV4: return REACH_IPV4;
        case NET_IPV6: return fTunnel ? REACH_IPV6_WEAK : REACH_IPV6_STRONG; // only prefer giving our IPv6 address if it's not tunnelled
        }
    case NET_TOR:
        switch(ourNet) {
        default: return REACH_DEFAULT;
        case NET_IPV4: return REACH_IPV4; // Tor users can connect to IPv4 as well
        case NET_TOR: return REACH_PRIVATE;
        }
    case NET_TEREDO:
        switch(ourNet) {
        default: return REACH_DEFAULT;
        case NET_TEREDO: return REACH_TEREDO;
        case NET_IPV6: return REACH_IPV6_WEAK;
        case NET_IPV4: return REACH_IPV4;
        }
    case NET_UNKNOWN:
    case NET_UNROUTABLE:
    default:
        switch(ourNet) {
        default: return REACH_DEFAULT;
        case NET_TEREDO: return REACH_TEREDO;
        case NET_IPV6: return REACH_IPV6_WEAK;
        case NET_IPV4: return REACH_IPV4;
        case NET_TOR: return REACH_PRIVATE; // either from Tor, or don't care about our address
        }
    }
}
/** Reset the port; the address part is reset by CNetAddr::Init(). */
void CService::Init()
{
    port = 0;
}
CService::CService()
{
    Init();
}
CService::CService(const CNetAddr& cip, unsigned short portIn) : CNetAddr(cip), port(portIn)
{
}
CService::CService(const struct in_addr& ipv4Addr, unsigned short portIn) : CNetAddr(ipv4Addr), port(portIn)
{
}
CService::CService(const struct in6_addr& ipv6Addr, unsigned short portIn) : CNetAddr(ipv6Addr), port(portIn)
{
}
// sockaddr ports arrive in network byte order, hence ntohs().
CService::CService(const struct sockaddr_in& addr) : CNetAddr(addr.sin_addr), port(ntohs(addr.sin_port))
{
    assert(addr.sin_family == AF_INET);
}
CService::CService(const struct sockaddr_in6 &addr) : CNetAddr(addr.sin6_addr, addr.sin6_scope_id), port(ntohs(addr.sin6_port))
{
    assert(addr.sin6_family == AF_INET6);
}
/**
 * Initialize this service from a generic sockaddr (AF_INET or AF_INET6).
 * Returns false for any other address family, leaving *this unchanged.
 */
bool CService::SetSockAddr(const struct sockaddr *paddr)
{
    if (paddr->sa_family == AF_INET) {
        *this = CService(*(const struct sockaddr_in*)paddr);
        return true;
    }
    if (paddr->sa_family == AF_INET6) {
        *this = CService(*(const struct sockaddr_in6*)paddr);
        return true;
    }
    // Unsupported address family.
    return false;
}
// The string constructors parse "ip[:port]" via Lookup() with DNS disabled
// (fAllowLookup=false); on failure the service stays default-initialized
// (unspecified address, port 0).
CService::CService(const char *pszIpPort)
{
    Init();
    CService ip;
    if (Lookup(pszIpPort, ip, 0, false))
        *this = ip;
}
CService::CService(const char *pszIpPort, int portDefault)
{
    Init();
    CService ip;
    if (Lookup(pszIpPort, ip, portDefault, false))
        *this = ip;
}
CService::CService(const std::string &strIpPort)
{
    Init();
    CService ip;
    if (Lookup(strIpPort.c_str(), ip, 0, false))
        *this = ip;
}
CService::CService(const std::string &strIpPort, int portDefault)
{
    Init();
    CService ip;
    if (Lookup(strIpPort.c_str(), ip, portDefault, false))
        *this = ip;
}
/** Port in host byte order. */
unsigned short CService::GetPort() const
{
    return port;
}
// Service comparisons order by address first, then port.
bool operator==(const CService& a, const CService& b)
{
    return (CNetAddr)a == (CNetAddr)b && a.port == b.port;
}
bool operator!=(const CService& a, const CService& b)
{
    return (CNetAddr)a != (CNetAddr)b || a.port != b.port;
}
bool operator<(const CService& a, const CService& b)
{
    return (CNetAddr)a < (CNetAddr)b || ((CNetAddr)a == (CNetAddr)b && a.port < b.port);
}
/**
 * Fill *paddr with a sockaddr_in / sockaddr_in6 for this service.
 * *addrlen is in/out: on entry the capacity of paddr, on success the
 * size actually used.  Returns false when the buffer is too small or the
 * address is neither IPv4 nor plain IPv6 (e.g. a Tor address).
 */
bool CService::GetSockAddr(struct sockaddr* paddr, socklen_t *addrlen) const
{
    if (IsIPv4()) {
        if (*addrlen < (socklen_t)sizeof(struct sockaddr_in))
            return false;
        *addrlen = sizeof(struct sockaddr_in);
        struct sockaddr_in *paddrin = (struct sockaddr_in*)paddr;
        memset(paddrin, 0, *addrlen);
        if (!GetInAddr(&paddrin->sin_addr))
            return false;
        paddrin->sin_family = AF_INET;
        // Ports go out in network byte order.
        paddrin->sin_port = htons(port);
        return true;
    }
    if (IsIPv6()) {
        if (*addrlen < (socklen_t)sizeof(struct sockaddr_in6))
            return false;
        *addrlen = sizeof(struct sockaddr_in6);
        struct sockaddr_in6 *paddrin6 = (struct sockaddr_in6*)paddr;
        memset(paddrin6, 0, *addrlen);
        if (!GetIn6Addr(&paddrin6->sin6_addr))
            return false;
        paddrin6->sin6_scope_id = scopeId;
        paddrin6->sin6_family = AF_INET6;
        paddrin6->sin6_port = htons(port);
        return true;
    }
    return false;
}
/**
 * Serialize this service as an 18-byte key: the 16 raw address bytes
 * followed by the port in big-endian order.  The byte string uniquely
 * identifies host+port for map/set lookups.
 */
std::vector<unsigned char> CService::GetKey() const
{
    std::vector<unsigned char> vKey(18);
    memcpy(&vKey[0], ip, 16);
    // Append the port high byte first (network byte order).
    vKey[16] = (port >> 8) & 0xFF;
    vKey[17] = port & 0xFF;
    return vKey;
}
/** Decimal port number. */
std::string CService::ToStringPort() const
{
    return strprintf("%u", port);
}
/** "host:port", bracketing the host part for IPv6 literals. */
std::string CService::ToStringIPPort() const
{
    if (IsIPv4() || IsTor()) {
        return ToStringIP() + ":" + ToStringPort();
    } else {
        return "[" + ToStringIP() + "]:" + ToStringPort();
    }
}
std::string CService::ToString() const
{
    return ToStringIPPort();
}
void CService::SetPort(unsigned short portIn)
{
    port = portIn;
}
/** Default subnet: marked invalid, so Match() always returns false. */
CSubNet::CSubNet():
    valid(false)
{
    memset(netmask, 0, sizeof(netmask));
}
/**
 * Parse a subnet specification: "addr" (single host), "addr/n" (CIDR), or
 * "addr/x.x.x.x" (explicit netmask).  Address parsing never does DNS.
 * Any parse failure marks the subnet invalid.  The stored network address
 * is normalized by masking with the netmask.
 */
CSubNet::CSubNet(const std::string &strSubnet)
{
    size_t slash = strSubnet.find_last_of('/');
    std::vector<CNetAddr> vIP;
    valid = true;
    // Default to /32 (IPv4) or /128 (IPv6), i.e. match single address
    memset(netmask, 255, sizeof(netmask));
    std::string strAddress = strSubnet.substr(0, slash);
    if (LookupHost(strAddress.c_str(), vIP, 1, false))
    {
        network = vIP[0];
        if (slash != strSubnet.npos)
        {
            std::string strNetmask = strSubnet.substr(slash + 1);
            int32_t n;
            // IPv4 addresses start at offset 12, and first 12 bytes must match, so just offset n
            const int astartofs = network.IsIPv4() ? 12 : 0;
            if (ParseInt32(strNetmask, &n)) // If a valid number, assume CIDR "/n" syntax
            {
                if(n >= 0 && n <= (128 - astartofs*8)) // Only valid if in range of bits of address
                {
                    n += astartofs*8;
                    // Clear bits [n..127]
                    for (; n < 128; ++n)
                        netmask[n>>3] &= ~(1<<(7-(n&7)));
                }
                else
                {
                    valid = false;
                }
            }
            else // If not a valid number, try full netmask syntax
            {
                if (LookupHost(strNetmask.c_str(), vIP, 1, false)) // Never allow lookup for netmask
                {
                    // Copy only the *last* four bytes in case of IPv4, the rest of the mask should stay 1's as
                    // we don't want pchIPv4 to be part of the mask.
                    for(int x=astartofs; x<16; ++x)
                        netmask[x] = vIP[0].ip[x];
                }
                else
                {
                    valid = false;
                }
            }
        }
    }
    else
    {
        valid = false;
    }
    // Normalize network according to netmask
    for(int x=0; x<16; ++x)
        network.ip[x] &= netmask[x];
}
/** Single-host subnet (/32 or /128); invalid iff the address is invalid. */
CSubNet::CSubNet(const CNetAddr &addr):
    valid(addr.IsValid())
{
    memset(netmask, 255, sizeof(netmask));
    network = addr;
}
/** True when addr falls inside this subnet: (addr & netmask) == network. */
bool CSubNet::Match(const CNetAddr &addr) const
{
    if (!valid || !addr.IsValid())
        return false;
    for(int x=0; x<16; ++x)
        if ((addr.ip[x] & netmask[x]) != network.ip[x])
            return false;
    return true;
}
/**
 * Number of leading one-bits in a netmask byte, or -1 when the byte is
 * not a contiguous prefix of ones (e.g. 0xA0), i.e. not a valid CIDR
 * mask fragment.
 */
static inline int NetmaskBits(uint8_t x)
{
    int bits = 0;
    uint8_t probe = 0x80;
    // Count the contiguous run of ones from the most significant bit.
    while (probe && (x & probe)) {
        ++bits;
        probe >>= 1;
    }
    // After the prefix of ones, no further bit may be set.
    while (probe) {
        if (x & probe)
            return -1;
        probe >>= 1;
    }
    return bits;
}
/**
 * Render as "network/bits" when the mask is a contiguous CIDR prefix,
 * otherwise as "network/full-netmask".
 */
std::string CSubNet::ToString() const
{
    /* Parse binary 1{n}0{N-n} to see if mask can be represented as /n */
    int cidr = 0;
    bool valid_cidr = true;
    // For IPv4 only the last 4 mask bytes are significant.
    int n = network.IsIPv4() ? 12 : 0;
    for (; n < 16 && netmask[n] == 0xff; ++n)
        cidr += 8;
    if (n < 16) {
        int bits = NetmaskBits(netmask[n]);
        if (bits < 0)
            valid_cidr = false;
        else
            cidr += bits;
        ++n;
    }
    // Any set bit after the one-prefix makes the mask non-CIDR.
    for (; n < 16 && valid_cidr; ++n)
        if (netmask[n] != 0x00)
            valid_cidr = false;
    /* Format output */
    std::string strNetmask;
    if (valid_cidr) {
        strNetmask = strprintf("%u", cidr);
    } else {
        if (network.IsIPv4())
            strNetmask = strprintf("%u.%u.%u.%u", netmask[12], netmask[13], netmask[14], netmask[15]);
        else
            strNetmask = strprintf("%x:%x:%x:%x:%x:%x:%x:%x",
                             netmask[0] << 8 | netmask[1], netmask[2] << 8 | netmask[3],
                             netmask[4] << 8 | netmask[5], netmask[6] << 8 | netmask[7],
                             netmask[8] << 8 | netmask[9], netmask[10] << 8 | netmask[11],
                             netmask[12] << 8 | netmask[13], netmask[14] << 8 | netmask[15]);
    }
    return network.ToString() + "/" + strNetmask;
}
bool CSubNet::IsValid() const
{
    return valid;
}
// Subnet equality/ordering compare validity, network address, and mask.
bool operator==(const CSubNet& a, const CSubNet& b)
{
    return a.valid == b.valid && a.network == b.network && !memcmp(a.netmask, b.netmask, 16);
}
bool operator!=(const CSubNet& a, const CSubNet& b)
{
    return !(a==b);
}
bool operator<(const CSubNet& a, const CSubNet& b)
{
    return (a.network < b.network || (a.network == b.network && memcmp(a.netmask, b.netmask, 16) < 0));
}
#ifdef WIN32
/** Human-readable text for a Winsock error code, e.g. "... (10061)". */
std::string NetworkErrorString(int err)
{
    char buf[256];
    buf[0] = 0;
    if(FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS | FORMAT_MESSAGE_MAX_WIDTH_MASK,
            NULL, err, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
            buf, sizeof(buf), NULL))
    {
        return strprintf("%s (%d)", buf, err);
    }
    else
    {
        return strprintf("Unknown error (%d)", err);
    }
}
#else
/** Human-readable text for an errno value, e.g. "Connection refused (111)". */
std::string NetworkErrorString(int err)
{
    char buf[256];
    const char *s = buf;
    buf[0] = 0;
    /* Too bad there are two incompatible implementations of the
     * thread-safe strerror. */
#ifdef STRERROR_R_CHAR_P /* GNU variant can return a pointer outside the passed buffer */
    s = strerror_r(err, buf, sizeof(buf));
#else /* POSIX variant always returns message in buffer */
    if (strerror_r(err, buf, sizeof(buf)))
        buf[0] = 0;
#endif
    return strprintf("%s (%d)", s, err);
}
#endif
/**
 * Close a socket and reset the handle to INVALID_SOCKET.
 * Safe to call on an already-invalid handle (returns false).
 */
bool CloseSocket(SOCKET& hSocket)
{
    if (hSocket == INVALID_SOCKET)
        return false;
#ifdef WIN32
    int ret = closesocket(hSocket);
#else
    int ret = close(hSocket);
#endif
    hSocket = INVALID_SOCKET;
    return ret != SOCKET_ERROR;
}
/**
 * Switch a socket between blocking and non-blocking mode.
 * CAUTION: on failure the socket is CLOSED (and hSocket invalidated)
 * before false is returned.
 */
bool SetSocketNonBlocking(SOCKET& hSocket, bool fNonBlocking)
{
    if (fNonBlocking) {
#ifdef WIN32
        u_long nOne = 1;
        if (ioctlsocket(hSocket, FIONBIO, &nOne) == SOCKET_ERROR) {
#else
        int fFlags = fcntl(hSocket, F_GETFL, 0);
        if (fcntl(hSocket, F_SETFL, fFlags | O_NONBLOCK) == SOCKET_ERROR) {
#endif
            CloseSocket(hSocket);
            return false;
        }
    } else {
#ifdef WIN32
        u_long nZero = 0;
        if (ioctlsocket(hSocket, FIONBIO, &nZero) == SOCKET_ERROR) {
#else
        int fFlags = fcntl(hSocket, F_GETFL, 0);
        if (fcntl(hSocket, F_SETFL, fFlags & ~O_NONBLOCK) == SOCKET_ERROR) {
#endif
            CloseSocket(hSocket);
            return false;
        }
    }
    return true;
}
| {
"content_hash": "9165de442be9b3f9f8595a8f11cf2047",
"timestamp": "",
"source": "github",
"line_count": 1486,
"max_line_length": 171,
"avg_line_length": 29.534320323014803,
"alnum_prop": 0.5962222019686475,
"repo_name": "ftrader-bitcoinunlimited/hardfork_prototype_1_mvf-bu",
"id": "782c7592fa3bf4b559bba5ccd895819b0eb8062d",
"size": "43888",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/netbase.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "682360"
},
{
"name": "C++",
"bytes": "5103219"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50622"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "189719"
},
{
"name": "Makefile",
"bytes": "111286"
},
{
"name": "Objective-C",
"bytes": "5785"
},
{
"name": "Objective-C++",
"bytes": "7360"
},
{
"name": "Protocol Buffer",
"bytes": "2308"
},
{
"name": "Python",
"bytes": "963126"
},
{
"name": "QMake",
"bytes": "2020"
},
{
"name": "Roff",
"bytes": "3821"
},
{
"name": "Shell",
"bytes": "44285"
}
],
"symlink_target": ""
} |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE36_Absolute_Path_Traversal__char_file_open_72b.cpp
Label Definition File: CWE36_Absolute_Path_Traversal.label.xml
Template File: sources-sink-72b.tmpl.cpp
*/
/*
* @description
* CWE: 36 Absolute Path Traversal
* BadSource: file Read input from a file
* GoodSource: Full path and file name
* Sinks: open
* BadSink : Open the file named in data using open()
* Flow Variant: 72 Data flow: data passed in a vector from one function to another in different source files
*
* */
#include "std_testcase.h"
#include <vector>
#ifndef _WIN32
#include <wchar.h>
#endif
#ifdef _WIN32
#define OPEN _open
#define CLOSE _close
#else
#include <unistd.h>
#define OPEN open
#define CLOSE close
#endif
using namespace std;
namespace CWE36_Absolute_Path_Traversal__char_file_open_72
{
#ifndef OMITBAD
/* Bad sink: dataVector[2] carries an attacker-influenced path (read from
 * a file by this variant's BadSource) and is opened without validation. */
void badSink(vector<char *> dataVector)
{
    /* copy data out of dataVector */
    char * data = dataVector[2];
    {
        int fileDesc;
        /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */
        fileDesc = OPEN(data, O_RDWR|O_CREAT, S_IREAD|S_IWRITE);
        if (fileDesc != -1)
        {
            CLOSE(fileDesc);
        }
    }
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
/* The sink body is identical to badSink; the "good" flow differs only in
 * that the caller supplies a fixed, trusted path in dataVector[2]. */
void goodG2BSink(vector<char *> dataVector)
{
    char * data = dataVector[2];
    {
        int fileDesc;
        /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */
        fileDesc = OPEN(data, O_RDWR|O_CREAT, S_IREAD|S_IWRITE);
        if (fileDesc != -1)
        {
            CLOSE(fileDesc);
        }
    }
}
#endif /* OMITGOOD */
} /* close namespace */
| {
"content_hash": "a474e5b498ec94b84290cc2c5a6a98f3",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 109,
"avg_line_length": 23.710526315789473,
"alnum_prop": 0.6281908990011099,
"repo_name": "JianpingZeng/xcc",
"id": "f433a05b748fba313f327ba9d9879738fa30aaf6",
"size": "1802",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "xcc/test/juliet/testcases/CWE36_Absolute_Path_Traversal/s02/CWE36_Absolute_Path_Traversal__char_file_open_72b.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<title>https://lucumt.info/tags/html5/</title>
<link rel="canonical" href="https://lucumt.info/tags/html5/">
<meta name="robots" content="noindex">
<meta charset="utf-8">
<meta http-equiv="refresh" content="0; url=https://lucumt.info/tags/html5/">
</head>
</html>
| {
"content_hash": "c5abf2344ad6644e703a55b4d0baafb2",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 80,
"avg_line_length": 32.8,
"alnum_prop": 0.6371951219512195,
"repo_name": "lucumt/ghblog",
"id": "f35f313015193cfc88f6eaa0fdcf688dc57dd4a4",
"size": "328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/tags/html5/page/1/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2039180"
},
{
"name": "JavaScript",
"bytes": "20976"
},
{
"name": "Makefile",
"bytes": "144"
},
{
"name": "SCSS",
"bytes": "98659"
},
{
"name": "XSLT",
"bytes": "7770"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/">
<channel>
<title>{Vincent Bonhomme}</title>
<description>Blog and website of VincentBonhomme, Web designer.</description>
<link>http://vincentbonhomme.github.io/resume</link>
<atom:link href="http://vincentbonhomme.github.io/resume/feed.xml" rel="self" type="application/rss+xml" />
<item>
<title>Welcome to { Personal } Jekyll Theme!</title>
<description>Click the view more posts link bellow, to see the currently available post-tutorials to help you get your { Personal } website up and running quicker!
</description>
<pubDate>Sat, 27 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/27/welcome.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/27/welcome.html</guid>
</item>
<item>
<title>Modifying the HTML HEAD</title>
<description>In the _config.yml, find and set the variables of the Head section:
</description>
<pubDate>Thu, 25 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/25/html-head-tutorial.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/25/html-head-tutorial.html</guid>
</item>
<item>
<title>Modifying the Intro</title>
<description>The intro part of the index page is consisted of three elements:
</description>
<pubDate>Wed, 24 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/24/intro-layout-tutorial.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/24/intro-layout-tutorial.html</guid>
</item>
<item>
<title>Setting up the Blog</title>
<description>A website is truly personal if it's your blog as well, this place of the internet where you can place your thoughts about anything!
</description>
<pubDate>Sat, 20 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/20/blog-tutorial.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/20/blog-tutorial.html</guid>
</item>
<item>
<title>Writing posts</title>
<description>Every file with the format <i>YYYY-MM-DD-post-title.markup</i> will be processed as a
post, with publication date <i>YYYY-MM-DD</i>.
</description>
<pubDate>Fri, 19 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/19/writing-posts.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/19/writing-posts.html</guid>
</item>
<item>
<title>Generating your Blog's RSS feed</title>
<description>Sit back, build the jekyll theme and it will be autogenerated in /feed.xml!
</description>
<pubDate>Mon, 15 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/15/rss-tutorial.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/15/rss-tutorial.html</guid>
</item>
<item>
<title>Adding a life event</title>
<description>One of the cool features of { Personal } is that enables you to tell your life's story in
the form of a timeline of photos, dates and text descriptions. Let's see how it works.
</description>
<pubDate>Fri, 12 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/12/timeline-tutorial.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/12/timeline-tutorial.html</guid>
</item>
<item>
<title>Modifying the social buttons</title>
<description>Social buttons (rendered in the footer) are great for having a small hub with all your social footprint.
In order to change them, go to _config.yml and edit the social list:
</description>
<pubDate>Thu, 11 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/11/social-buttons-tutorial.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/11/social-buttons-tutorial.html</guid>
</item>
<item>
<title>Modifying the Contact</title>
<description>The contact part of the index page is consisted of two elements:
</description>
<pubDate>Wed, 10 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/10/contact-tutorial.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/10/contact-tutorial.html</guid>
</item>
<item>
<title>Modifying the 404 page</title>
<description>You can change the text from the 404.html file and set the image from the _config.yml:
</description>
<pubDate>Fri, 05 Jun 2015 00:00:00 +0200</pubDate>
<link>http://vincentbonhomme.github.io/resume/tech/2015/06/05/404-page.html</link>
<guid isPermaLink="true">http://vincentbonhomme.github.io/resume/tech/2015/06/05/404-page.html</guid>
</item>
</channel>
</rss>
| {
"content_hash": "59e9af3fe50f9512a566853b0632b47f",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 168,
"avg_line_length": 37.53472222222222,
"alnum_prop": 0.6923219241443108,
"repo_name": "VincentBonhomme/resume",
"id": "5548c9e17dc0abf6bc4421b7d23bbef639f5de0e",
"size": "5405",
"binary": false,
"copies": "2",
"ref": "refs/heads/gh-pages",
"path": "_site/feed.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14237"
},
{
"name": "HTML",
"bytes": "551391"
},
{
"name": "JavaScript",
"bytes": "49971"
},
{
"name": "Ruby",
"bytes": "4227"
}
],
"symlink_target": ""
} |
<?php
echo '<pre>';
var_dump($_SERVER);
require 'libs/PDOFactory.php';
require 'libs/ProdutoModel.php';
require 'libs/ProdutoModelFactory.php';
$produtoModel = ProdutoModelFactory::create();
$produto = $produtoModel->getById($_GET['id']);
if ($produto === false) {
die('Produto não encontrado!');
}
require 'templates/visualizar-produto.phtml';
?>
| {
"content_hash": "7c63277cfdb741a86bdbbab3389c49ce",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 47,
"avg_line_length": 18.736842105263158,
"alnum_prop": 0.699438202247191,
"repo_name": "jonataa/curso-php-free",
"id": "286e49b04293818d83035613291a8c249e11a111",
"size": "357",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "exemplos/conexao-db/visualizar-produto.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19"
},
{
"name": "HTML",
"bytes": "4118"
},
{
"name": "PHP",
"bytes": "28622"
}
],
"symlink_target": ""
} |
require "spec_helper"
module Omnibus
describe ArtifactoryPublisher do
let(:path) { "/path/to/files/*.deb" }
let(:repository) { "REPO" }
let(:package) do
double(Package,
path: "/path/to/files/chef.deb",
name: "chef.deb",
content: "BINARY",
validate!: true)
end
let(:metadata) do
Metadata.new(package,
name: "chef",
friendly_name: "Chef",
homepage: "https://www.getchef.com",
version: "11.0.6",
iteration: 1,
license: "Apache-2.0",
basename: "chef.deb",
platform: "ubuntu",
platform_version: "14.04",
arch: "x86_64",
sha1: "SHA1",
sha256: "SHA256",
sha512: "SHA512",
md5: "ABCDEF123456",
version_manifest: {
manifest_format: 1,
build_version: "11.0.6",
build_git_revision: "2e763ac957b308ba95cef256c2491a5a55a163cc",
software: {
zlib: {
locked_source: {
md5: "44d667c142d7cda120332623eab69f40",
url: "http://iweb.dl.sourceforge.net/project/libpng/zlib/1.2.8/zlib-1.2.8.tar.gz",
},
locked_version: "1.2.8",
source_type: "url",
described_version: "1.2.8",
license: "Zlib",
},
openssl: {
locked_source: {
md5: "562986f6937aabc7c11a6d376d8a0d26",
extract: "lax_tar",
url: "http://iweb.dl.sourceforge.net/project/libpng/zlib/1.2.8/zlib-1.2.8.tar.gz",
},
locked_version: "1.0.1s",
source_type: "url",
described_version: "1.0.1s",
license: "OpenSSL",
},
ruby: {
locked_source: {
md5: "091b62f0a9796a3c55de2a228a0e6ef3",
url: "https://cache.ruby-lang.org/pub/ruby/2.1/ruby-2.1.8.tar.gz",
},
locked_version: "2.1.8",
source_type: "url",
described_version: "2.1.8",
license: "BSD-2-Clause",
},
ohai: {
locked_source: {
git: "https://github.com/chef/ohai.git",
},
locked_version: "fec0959aa5da5ce7ba0e07740dbc08546a8f53f0",
source_type: "git",
described_version: "master",
license: "Apache-2.0",
},
chef: {
locked_source: {
path: "/home/jenkins/workspace/chef-build/architecture/x86_64/platform/ubuntu-10.04/project/chef/role/builder/omnibus/files/../..",
options: {
exclude: [
"omnibus/vendor",
],
},
},
locked_version: "local_source",
source_type: "path",
described_version: "local_source",
license: "Apache-2.0",
},
},
})
end
let(:packages) { [package] }
let(:client) { double("Artifactory::Client") }
let(:artifact) { double("Artifactory::Resource::Artifact", upload: nil) }
let(:build) { double("Artifactory::Resource::Build") }
let(:package_properties) do
{
"omnibus.architecture" => "x86_64",
"omnibus.iteration" => 1,
"omnibus.md5" => "ABCDEF123456",
"omnibus.platform" => "ubuntu",
"omnibus.platform_version" => "14.04",
"omnibus.project" => "chef",
"omnibus.sha1" => "SHA1",
"omnibus.sha256" => "SHA256",
"omnibus.sha512" => "SHA512",
"omnibus.version" => "11.0.6",
"omnibus.license" => "Apache-2.0",
"md5" => "ABCDEF123456",
"sha1" => "SHA1",
"sha256" => "SHA256",
"sha512" => "SHA512",
}
end
let(:metadata_json_properites) do
# we don't attache checksum properties to the *.metadata.json
package_properties.delete_if { |k, v| k =~ /md5|sha/ }
end
let(:build_values) do
{
"build.name" => "chef",
"build.number" => "11.0.6",
}
end
let(:options) { { repository: repository } }
before do
allow(subject).to receive(:client).and_return(client)
allow(subject).to receive(:artifact_for).and_return(artifact)
allow(subject).to receive(:build_for).and_return(build)
allow(package).to receive(:metadata).and_return(metadata)
allow(build).to receive(:save)
end
subject { described_class.new(path, options) }
describe "#publish" do
before do
allow(subject).to receive(:packages).and_return(packages)
Config.artifactory_base_path("com/getchef")
Config.publish_retries(1)
end
it "validates the package" do
expect(package).to receive(:validate!).once
subject.publish
end
it "uploads the package" do
expect(artifact).to receive(:upload).with(
repository,
"com/getchef/chef/11.0.6/ubuntu/14.04/chef.deb",
hash_including(package_properties)
).once
subject.publish
end
it "uploads the package's associated *.metadata.json" do
expect(artifact).to receive(:upload).with(
repository,
"com/getchef/chef/11.0.6/ubuntu/14.04/chef.deb.metadata.json",
hash_including(metadata_json_properites)
).once
subject.publish
end
it "it creates a build record for all packages" do
expect(build).to receive(:save).once
subject.publish
end
context "when no packages exist" do
let(:packages) { [] }
it "does nothing" do
expect(artifact).to_not receive(:upload)
expect(build).to_not receive(:save)
end
end
context "when upload fails" do
before do
Config.publish_retries(3)
# This is really ugly but there is no easy way to stub a method to
# raise an exception a set number of times.
@times = 0
allow(artifact).to receive(:upload) do
@times += 1
raise Artifactory::Error::HTTPError.new("status" => "409", "message" => "CONFLICT") unless @times > 1
end
end
it "retries the upload " do
output = capture_logging { subject.publish }
expect(output).to include("Retrying failed publish")
end
end
context "when a block is given" do
it "yields the package to the block" do
block = ->(package) { package.do_something! }
expect(package).to receive(:do_something!).once
subject.publish(&block)
end
end
context "when the :build_record option is false" do
subject { described_class.new(path, repository: repository, build_record: false) }
it "does not create a build record at the end of publishing" do
expect(build).to_not receive(:save)
subject.publish
end
end
context "additional properties are provided" do
let(:delivery_props) do
{
"delivery.change" => "4dbf38de-3e82-439f-8090-c5f3e11aeba6",
"delivery.sha" => "ec1cb62616350176fc6fd9b1dc4ad3153caa0791",
}
end
let(:options) do
{
properties: delivery_props,
repository: repository,
}
end
it "uploads the package with the provided properties" do
expect(artifact).to receive(:upload).with(
repository,
"com/getchef/chef/11.0.6/ubuntu/14.04/chef.deb",
hash_including(package_properties.merge(delivery_props))
).once
subject.publish
end
end
context "custom artifactory_publish_pattern is set" do
before do
Config.artifactory_publish_pattern("%{platform}/%{platform_version}/%{arch}/%{basename}")
end
it "uploads the package to the provided path" do
expect(artifact).to receive(:upload).with(
repository,
"com/getchef/ubuntu/14.04/x86_64/chef.deb",
hash_including(metadata_json_properites)
).once
subject.publish
end
end
end
describe "#metadata_properties_for" do
it "returns the transformed package metadata values" do
expect(subject.send(:metadata_properties_for, package)).to include(package_properties.merge(build_values))
end
context ":build_record is false" do
let(:options) do
{
build_record: false,
repository: repository,
}
end
it "does not include `build.*` values" do
expect(subject.send(:metadata_properties_for, package)).to include(package_properties)
expect(subject.send(:metadata_properties_for, package)).to_not include(build_values)
end
end
end
end
end
| {
"content_hash": "6e2cb51df4b1dc9051b1d9563944e246",
"timestamp": "",
"source": "github",
"line_count": 287,
"max_line_length": 147,
"avg_line_length": 31.390243902439025,
"alnum_prop": 0.5404595404595405,
"repo_name": "chef/omnibus",
"id": "c98dab47c7be2d05d9bcd5413411c4ca69872200",
"size": "9009",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "spec/unit/publishers/artifactory_publisher_spec.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "9202"
},
{
"name": "HTML",
"bytes": "22844"
},
{
"name": "Ruby",
"bytes": "954649"
},
{
"name": "Shell",
"bytes": "24401"
}
],
"symlink_target": ""
} |
Sentry
======
Sentry provides you with a generic interface to view and interact with your error logs. By
default, it will catch any exception thrown by Django and store it in a database. With this
it allows you to interact and view near real-time information to discover issues and more
easily trace them in your application.
.. toctree::
:maxdepth: 2
install/index
config/index
technical/index
contributing/index
.. image:: http://dl.dropbox.com/u/116385/Screenshots/l6xk.png
Deprecation Notes
-----------------
Milestones releases are 1.3 or 1.4, and our deprecation policy is to a two version step. For example,
a feature will be deprecated in 1.3, and completely removed in 1.4.
| {
"content_hash": "a313eb2e1adb103d0bcc3ac95e1de073",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 101,
"avg_line_length": 30.608695652173914,
"alnum_prop": 0.7414772727272727,
"repo_name": "primepix/django-sentry",
"id": "b755e5b66eb0c3ac23b25042ecd8165fa2ae389b",
"size": "704",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docs/index.rst",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "20952"
},
{
"name": "JavaScript",
"bytes": "10544"
},
{
"name": "Python",
"bytes": "300510"
},
{
"name": "Shell",
"bytes": "4106"
}
],
"symlink_target": ""
} |
namespace SingleResponsibilityShapesAfter
{
using SingleResponsibilityShapesAfter.Contracts;
public class DrawingManager : IDrawingManager
{
private readonly IDrawingContext drawingContext;
private readonly IRenderer renderer;
public DrawingManager(IDrawingContext drawingContext, IRenderer renderer)
{
this.drawingContext = drawingContext;
this.renderer = renderer;
}
public void Draw(IShape shape)
{
this.renderer.Render(this.drawingContext, shape);
}
}
}
| {
"content_hash": "e798ce4307d2f2ebae17faab21555b30",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 81,
"avg_line_length": 27.523809523809526,
"alnum_prop": 0.6660899653979239,
"repo_name": "DimitarDKirov/High-Quality-Code",
"id": "107cd58bf3ea56689b7bc6e4d60db8c83d779277",
"size": "580",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "16. SOLID and Other Principles/Demo/SOLID and Other Principles/1. Single Responsibility/1. After - Drawing Shape/DrawingManager.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "1577305"
},
{
"name": "CSS",
"bytes": "2020"
},
{
"name": "HTML",
"bytes": "70342"
},
{
"name": "JavaScript",
"bytes": "3230"
},
{
"name": "PowerShell",
"bytes": "145"
}
],
"symlink_target": ""
} |
class ContactsController < ApplicationController
#we should probably create the before method for the we use find parameters
before_filter :authenticate_user!
before_filter :find_user
def new
@contact = Contact.new
end
def update
@user = User.find(params[:user_id])
@contact = Contact.find(params[:id])
@contact.update(contact_params)
redirect_to @user
end
def update_active_flag
change_string_to_boolean
@user = User.find(params[:user_id])
@contact = Contact.find(params[:contact_id])
@contact.update(contact_params)
render json: {result: true}, status: 200
end
def delete
@contact = Contact.find(params[:contact_id]).destroy
render json: {result: true}, status: 200
end
def show
end
# trigger the send_message
def sms
@contact = Contact.find(params[:id])
if @user.message && @contact.phone_number
@contact.send_message("#{@user.message} -#{@user.first_name}")
flash[:success] = "Your birthday message is sent!"
redirect_to @user
else
@contact.send_message("Happy Birthday #{@contact.nick_name}! Hope you have a great day -#{@user.first_name}")
flash[:success] = "Default birthday message is sent!"
redirect_to @user
end
end
def create
@user = User.find(params[:user_id])
@contact = Contact.new(contact_params)
@contact.user_id = @user.id
if @contact.save
redirect_to @user
else
render :new
end
end
private
def contact_params
params.require(:contact).permit(:nick_name, :first_name, :last_name, :phone_number, :birthday, :email, :is_active)
end
def find_user
#@user = User.find(params[:user_id])
@user = current_user
end
def change_string_to_boolean
if params[:contact][:is_active] == "false"
params[:contact][:is_active] = false
else
params[:contact][:is_active] = true
end
end
end
| {
"content_hash": "f2e602ec71fc81b56f6a0c78474b78d7",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 120,
"avg_line_length": 23.686746987951807,
"alnum_prop": 0.6378433367243134,
"repo_name": "Berf/berf",
"id": "f83451f6f72a45386658d9c11ab2f0d2591795a6",
"size": "1966",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/controllers/contacts_controller.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "426345"
},
{
"name": "CoffeeScript",
"bytes": "211"
},
{
"name": "HTML",
"bytes": "24434"
},
{
"name": "JavaScript",
"bytes": "417221"
},
{
"name": "Ruby",
"bytes": "60449"
}
],
"symlink_target": ""
} |
Deprecated. Use [https://github.com/fhinkel/InteractiveShell](https://github.com/fhinkel/InteractiveShell) instead.
| {
"content_hash": "3f4be538d87c219aad5a43a4309b55bb",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 117,
"avg_line_length": 61.5,
"alnum_prop": 0.7642276422764228,
"repo_name": "fhinkel/tryM2",
"id": "85188f0faf98a8b73f9a76b0646963e29ff37e75",
"size": "123",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Readme.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16927"
},
{
"name": "HTML",
"bytes": "724891"
},
{
"name": "JavaScript",
"bytes": "86312"
},
{
"name": "Makefile",
"bytes": "1169"
},
{
"name": "Perl",
"bytes": "8217"
},
{
"name": "Shell",
"bytes": "506"
},
{
"name": "TeX",
"bytes": "46298"
}
],
"symlink_target": ""
} |
package org.apache.camel.component.seda;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.camel.Consumer;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.junit.Test;
/**
* @version
*/
public class SedaEndpointTest extends ContextTestSupport {
private BlockingQueue<Exchange> queue = new ArrayBlockingQueue<>(1000);
@Test
public void testSedaEndpointUnboundedQueue() throws Exception {
BlockingQueue<Exchange> unbounded = new LinkedBlockingQueue<>();
SedaEndpoint seda = new SedaEndpoint("seda://foo", context.getComponent("seda"), unbounded);
assertNotNull(seda);
assertEquals(Integer.MAX_VALUE, seda.getSize());
assertSame(unbounded, seda.getQueue());
assertEquals(1, seda.getConcurrentConsumers());
Producer prod = seda.createProducer();
seda.onStarted((SedaProducer) prod);
assertEquals(1, seda.getProducers().size());
Consumer cons = seda.createConsumer(new Processor() {
public void process(Exchange exchange) throws Exception {
// do nothing
}
});
seda.onStarted((SedaConsumer) cons);
assertEquals(1, seda.getConsumers().size());
assertEquals(0, seda.getExchanges().size());
}
@Test
public void testSedaEndpoint() throws Exception {
SedaEndpoint seda = new SedaEndpoint("seda://foo", context.getComponent("seda"), queue);
assertNotNull(seda);
assertEquals(1000, seda.getSize());
assertSame(queue, seda.getQueue());
assertEquals(1, seda.getConcurrentConsumers());
Producer prod = seda.createProducer();
seda.onStarted((SedaProducer) prod);
assertEquals(1, seda.getProducers().size());
Consumer cons = seda.createConsumer(new Processor() {
public void process(Exchange exchange) throws Exception {
// do nothing
}
});
seda.onStarted((SedaConsumer) cons);
assertEquals(1, seda.getConsumers().size());
assertEquals(0, seda.getExchanges().size());
}
@Test
public void testSedaEndpointTwo() throws Exception {
SedaEndpoint seda = new SedaEndpoint("seda://foo", context.getComponent("seda"), queue, 2);
assertNotNull(seda);
assertEquals(1000, seda.getSize());
assertSame(queue, seda.getQueue());
assertEquals(2, seda.getConcurrentConsumers());
Producer prod = seda.createProducer();
seda.onStarted((SedaProducer) prod);
assertEquals(1, seda.getProducers().size());
Consumer cons = seda.createConsumer(new Processor() {
public void process(Exchange exchange) throws Exception {
// do nothing
}
});
seda.onStarted((SedaConsumer) cons);
assertEquals(1, seda.getConsumers().size());
assertEquals(0, seda.getExchanges().size());
}
@Test
public void testSedaEndpointSetQueue() throws Exception {
SedaEndpoint seda = new SedaEndpoint();
assertNotNull(seda);
seda.setCamelContext(context);
seda.setEndpointUriIfNotSpecified("seda://bar");
assertNotNull(seda.getQueue());
// overwrite with a new queue
seda.setQueue(new ArrayBlockingQueue<Exchange>(1000));
seda.setConcurrentConsumers(2);
assertEquals(1000, seda.getSize());
assertNotSame(queue, seda.getQueue());
assertEquals(2, seda.getConcurrentConsumers());
Producer prod = seda.createProducer();
seda.onStarted((SedaProducer) prod);
assertEquals(1, seda.getProducers().size());
Consumer cons = seda.createConsumer(new Processor() {
public void process(Exchange exchange) throws Exception {
// do nothing
}
});
seda.onStarted((SedaConsumer) cons);
assertEquals(1, seda.getConsumers().size());
assertEquals(0, seda.getExchanges().size());
}
@Test
public void testSedaConsumer() throws Exception {
SedaEndpoint seda = context.getEndpoint("seda://foo", SedaEndpoint.class);
Consumer consumer = seda.createConsumer(new Processor() {
public void process(Exchange exchange) throws Exception {
// do nothing
}
});
assertSame(seda, consumer.getEndpoint());
assertNotNull(consumer.toString());
}
@Test
public void testSedaDefaultValue() throws Exception {
SedaComponent sedaComponent = new SedaComponent();
sedaComponent.setQueueSize(300);
sedaComponent.setConcurrentConsumers(3);
context.addComponent("seda", sedaComponent);
SedaEndpoint seda = context.getEndpoint("seda://foo", SedaEndpoint.class);
assertEquals(300, seda.getSize());
assertEquals(3, seda.getConcurrentConsumers());
}
}
| {
"content_hash": "b927c3180ddb852a170456282295bf89",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 108,
"avg_line_length": 34.26,
"alnum_prop": 0.6423428682623078,
"repo_name": "onders86/camel",
"id": "04fdddcc22d0b274565846600a24f1fe966864cc",
"size": "5942",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "camel-core/src/test/java/org/apache/camel/component/seda/SedaEndpointTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Apex",
"bytes": "6519"
},
{
"name": "Batchfile",
"bytes": "6512"
},
{
"name": "CSS",
"bytes": "30373"
},
{
"name": "Elm",
"bytes": "10852"
},
{
"name": "FreeMarker",
"bytes": "11410"
},
{
"name": "Groovy",
"bytes": "54390"
},
{
"name": "HTML",
"bytes": "190929"
},
{
"name": "Java",
"bytes": "69972191"
},
{
"name": "JavaScript",
"bytes": "90399"
},
{
"name": "Makefile",
"bytes": "513"
},
{
"name": "Python",
"bytes": "36"
},
{
"name": "Ruby",
"bytes": "4802"
},
{
"name": "Scala",
"bytes": "323702"
},
{
"name": "Shell",
"bytes": "23616"
},
{
"name": "Tcl",
"bytes": "4974"
},
{
"name": "Thrift",
"bytes": "6979"
},
{
"name": "XQuery",
"bytes": "546"
},
{
"name": "XSLT",
"bytes": "285105"
}
],
"symlink_target": ""
} |
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.main;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.main.Resilience4jConfigurationProperties;
/**
* Generated by camel build tools - do NOT edit this file!
*/
@SuppressWarnings("unchecked")
public class Resilience4jConfigurationPropertiesConfigurer extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.main.Resilience4jConfigurationProperties target = (org.apache.camel.main.Resilience4jConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "automatictransitionfromopentohalfopenenabled":
case "AutomaticTransitionFromOpenToHalfOpenEnabled": target.setAutomaticTransitionFromOpenToHalfOpenEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
case "bulkheadenabled":
case "BulkheadEnabled": target.setBulkheadEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
case "bulkheadmaxconcurrentcalls":
case "BulkheadMaxConcurrentCalls": target.setBulkheadMaxConcurrentCalls(property(camelContext, java.lang.Integer.class, value)); return true;
case "bulkheadmaxwaitduration":
case "BulkheadMaxWaitDuration": target.setBulkheadMaxWaitDuration(property(camelContext, java.lang.Integer.class, value)); return true;
case "circuitbreakerref":
case "CircuitBreakerRef": target.setCircuitBreakerRef(property(camelContext, java.lang.String.class, value)); return true;
case "configref":
case "ConfigRef": target.setConfigRef(property(camelContext, java.lang.String.class, value)); return true;
case "failureratethreshold":
case "FailureRateThreshold": target.setFailureRateThreshold(property(camelContext, java.lang.Float.class, value)); return true;
case "minimumnumberofcalls":
case "MinimumNumberOfCalls": target.setMinimumNumberOfCalls(property(camelContext, java.lang.Integer.class, value)); return true;
case "permittednumberofcallsinhalfopenstate":
case "PermittedNumberOfCallsInHalfOpenState": target.setPermittedNumberOfCallsInHalfOpenState(property(camelContext, java.lang.Integer.class, value)); return true;
case "slidingwindowsize":
case "SlidingWindowSize": target.setSlidingWindowSize(property(camelContext, java.lang.Integer.class, value)); return true;
case "slidingwindowtype":
case "SlidingWindowType": target.setSlidingWindowType(property(camelContext, java.lang.String.class, value)); return true;
case "slowcalldurationthreshold":
case "SlowCallDurationThreshold": target.setSlowCallDurationThreshold(property(camelContext, java.lang.Integer.class, value)); return true;
case "slowcallratethreshold":
case "SlowCallRateThreshold": target.setSlowCallRateThreshold(property(camelContext, java.lang.Float.class, value)); return true;
case "timeoutcancelrunningfuture":
case "TimeoutCancelRunningFuture": target.setTimeoutCancelRunningFuture(property(camelContext, java.lang.Boolean.class, value)); return true;
case "timeoutduration":
case "TimeoutDuration": target.setTimeoutDuration(property(camelContext, java.lang.Integer.class, value)); return true;
case "timeoutenabled":
case "TimeoutEnabled": target.setTimeoutEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
case "timeoutexecutorserviceref":
case "TimeoutExecutorServiceRef": target.setTimeoutExecutorServiceRef(property(camelContext, java.lang.String.class, value)); return true;
case "waitdurationinopenstate":
case "WaitDurationInOpenState": target.setWaitDurationInOpenState(property(camelContext, java.lang.Integer.class, value)); return true;
case "writablestacktraceenabled":
case "WritableStackTraceEnabled": target.setWritableStackTraceEnabled(property(camelContext, java.lang.Boolean.class, value)); return true;
default: return false;
}
}
}
| {
"content_hash": "df3429fa17103636f1ca69107083fb96",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 185,
"avg_line_length": 71.11475409836065,
"alnum_prop": 0.7648686030428768,
"repo_name": "zregvart/camel",
"id": "a45e87710d9dc933a8554d4c4153af3062028e5f",
"size": "4338",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "core/camel-main/src/generated/java/org/apache/camel/main/Resilience4jConfigurationPropertiesConfigurer.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Apex",
"bytes": "6521"
},
{
"name": "Batchfile",
"bytes": "2353"
},
{
"name": "CSS",
"bytes": "5472"
},
{
"name": "Elm",
"bytes": "10852"
},
{
"name": "FreeMarker",
"bytes": "8015"
},
{
"name": "Groovy",
"bytes": "20938"
},
{
"name": "HTML",
"bytes": "914791"
},
{
"name": "Java",
"bytes": "90321137"
},
{
"name": "JavaScript",
"bytes": "101298"
},
{
"name": "RobotFramework",
"bytes": "8461"
},
{
"name": "Shell",
"bytes": "11165"
},
{
"name": "TSQL",
"bytes": "28835"
},
{
"name": "Tcl",
"bytes": "4974"
},
{
"name": "Thrift",
"bytes": "6979"
},
{
"name": "XQuery",
"bytes": "546"
},
{
"name": "XSLT",
"bytes": "280849"
}
],
"symlink_target": ""
} |
<!---
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-->
# Apache Hadoop Changelog
## Release 0.14.3 - 2007-10-19
### INCOMPATIBLE CHANGES:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### IMPORTANT ISSUES:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### NEW FEATURES:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### IMPROVEMENTS:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### BUG FIXES:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
| [HADOOP-2072](https://issues.apache.org/jira/browse/HADOOP-2072) | RawLocalFileStatus is causing Path problems | Major | fs | Dennis Kubes | |
| [HADOOP-2053](https://issues.apache.org/jira/browse/HADOOP-2053) | OutOfMemoryError : Java heap space errors in hadoop 0.14 | Blocker | . | Lohit Vijayarenu | Arun C Murthy |
| [HADOOP-2043](https://issues.apache.org/jira/browse/HADOOP-2043) | 0.14.2 release compiled with Java 1.6 instead of Java 1.5 | Blocker | build | Doug Cutting | Doug Cutting |
| [HADOOP-2036](https://issues.apache.org/jira/browse/HADOOP-2036) | NPE in JvmMetrics.doThreadUpdates | Blocker | metrics | Koji Noguchi | Nigel Daley |
### TESTS:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### SUB-TASKS:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### OTHER:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
| {
"content_hash": "a4f7d6492ba77e1ae711179d2ed4337d",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 177,
"avg_line_length": 35.054794520547944,
"alnum_prop": 0.6193825713169206,
"repo_name": "Ethanlm/hadoop",
"id": "e418744fd3b2831b57d52a7dbf597f4d34ca0112",
"size": "2560",
"binary": false,
"copies": "16",
"ref": "refs/heads/trunk",
"path": "hadoop-common-project/hadoop-common/src/site/markdown/release/0.14.3/CHANGES.0.14.3.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "68758"
},
{
"name": "C",
"bytes": "1406803"
},
{
"name": "C++",
"bytes": "1816626"
},
{
"name": "CMake",
"bytes": "54617"
},
{
"name": "CSS",
"bytes": "58347"
},
{
"name": "HTML",
"bytes": "205485"
},
{
"name": "Java",
"bytes": "64946878"
},
{
"name": "JavaScript",
"bytes": "606717"
},
{
"name": "Protocol Buffer",
"bytes": "269457"
},
{
"name": "Python",
"bytes": "23553"
},
{
"name": "Shell",
"bytes": "380068"
},
{
"name": "TLA",
"bytes": "14993"
},
{
"name": "TeX",
"bytes": "19322"
},
{
"name": "XSLT",
"bytes": "16894"
}
],
"symlink_target": ""
} |
package org.apache.hadoop.hbase;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell.Type;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.hbase.master.RegionState.State;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
/**
* <p>
 * Read/write operations on region and assignment information stored in <code>hbase:meta</code>.
* </p>
* <p>
* Some of the methods of this class take ZooKeeperWatcher as a param. The only reason for this is
* because when used on client-side (like from HBaseAdmin), we want to use short-living connection
* (opened before each operation, closed right after), while when used on HM or HRS (like in
* AssignmentManager) we want permanent connection.
* </p>
* <p>
* HBASE-10070 adds a replicaId to HRI, meaning more than one HRI can be defined for the same table
* range (table, startKey, endKey). For every range, there will be at least one HRI defined which is
* called default replica.
* </p>
* <p>
* Meta layout (as of 0.98 + HBASE-10070) is like:
*
* <pre>
* For each table there is single row in column family 'table' formatted:
* <tableName> including namespace and columns are:
* table: state => contains table state
*
* For each table range, there is a single row, formatted like:
* <tableName>,<startKey>,<regionId>,<encodedRegionName>.
* This row corresponds to the regionName of the default region replica.
* Columns are:
* info:regioninfo => contains serialized HRI for the default region replica
* info:server => contains hostname:port (in string form) for the server hosting
* the default regionInfo replica
* info:server_<replicaId> => contains hostname:port (in string form) for the server hosting
* the regionInfo replica with replicaId
* info:serverstartcode => contains server start code (in binary long form) for the server
* hosting the default regionInfo replica
* info:serverstartcode_<replicaId> => contains server start code (in binary long form) for
* the server hosting the regionInfo replica with
* replicaId
* info:seqnumDuringOpen => contains seqNum (in binary long form) for the region at the time
* the server opened the region with default replicaId
* info:seqnumDuringOpen_<replicaId> => contains seqNum (in binary long form) for the region
* at the time the server opened the region with
* replicaId
* info:splitA => contains a serialized HRI for the first daughter region if the
* region is split
* info:splitB => contains a serialized HRI for the second daughter region if the
* region is split
* info:mergeA => contains a serialized HRI for the first parent region if the
* region is the result of a merge
* info:mergeB => contains a serialized HRI for the second parent region if the
* region is the result of a merge
* </pre>
* </p>
* <p>
* The actual layout of meta should be encapsulated inside MetaTableAccessor methods, and should not
* leak out of it (through Result objects, etc)
* </p>
*/
@InterfaceAudience.Private
public class MetaTableAccessor {
// General diagnostics logger for this accessor.
private static final Logger LOG = LoggerFactory.getLogger(MetaTableAccessor.class);
// Dedicated logger channel for operations against hbase:meta itself.
private static final Logger METALOG = LoggerFactory.getLogger("org.apache.hadoop.hbase.META");
// Qualifier holding replication "parent" info; presumably in the replication
// barrier family -- confirm against the writers of this column.
@VisibleForTesting
public static final byte[] REPLICATION_PARENT_QUALIFIER = Bytes.toBytes("parent");
// NOTE(review): escape/separator bytes; their encoding scheme is not visible in
// this portion of the file -- see the serialization helpers that use them.
private static final byte ESCAPE_BYTE = (byte) 0xFF;
private static final byte SEPARATED_BYTE = 0x00;
/**
 * Lists all of the table regions currently in META.
 * Deprecated; kept only until the remaining tests stop using it.
 * @param connection what we will use
 * @param tableName table to list
 * @return Map of all user-space regions to servers
 * @deprecated use {@link #getTableRegionsAndLocations}, region can have multiple locations
 */
@Deprecated
public static NavigableMap<RegionInfo, ServerName> allTableRegions(
Connection connection, final TableName tableName) throws IOException {
final NavigableMap<RegionInfo, ServerName> regions = new TreeMap<>();
// Visitor accumulates into the enclosing map; one entry per non-null location.
Visitor visitor = new TableVisitorBase(tableName) {
@Override
public boolean visitInternal(Result result) throws IOException {
RegionLocations locations = getRegionLocations(result);
// Row carried no parseable region info; keep scanning.
if (locations == null) return true;
for (HRegionLocation loc : locations.getRegionLocations()) {
if (loc != null) {
RegionInfo regionInfo = loc.getRegionInfo();
regions.put(regionInfo, loc.getServerName());
}
}
return true;
}
};
scanMetaForTableRegions(connection, visitor, tableName);
return regions;
}
/**
 * Which portion of <code>hbase:meta</code> a scan reads; each value maps to the
 * column families that hold that kind of data.
 */
@InterfaceAudience.Private
public enum QueryType {
ALL(HConstants.TABLE_FAMILY, HConstants.CATALOG_FAMILY),
REGION(HConstants.CATALOG_FAMILY),
TABLE(HConstants.TABLE_FAMILY),
REPLICATION(HConstants.REPLICATION_BARRIER_FAMILY);
// Column families that scans of this query type must add.
private final byte[][] families;
QueryType(byte[]... families) {
this.families = families;
}
byte[][] getFamilies() {
return this.families;
}
}
/** The delimiter for meta columns for replicaIds > 0 */
protected static final char META_REPLICA_ID_DELIMITER = '_';
/** A regex for parsing server columns from meta. See above javadoc for meta layout */
// Group 1 (optional) captures "_<4 hex digits>", i.e. the non-default replicaId suffix.
private static final Pattern SERVER_COLUMN_PATTERN
= Pattern.compile("^server(_[0-9a-fA-F]{4})?$");
////////////////////////
// Reading operations //
////////////////////////
/**
 * Performs a full scan of <code>hbase:meta</code> for regions.
 * @param connection connection we're using
 * @param visitor Visitor invoked against each row in regions family.
 * @throws IOException if the underlying meta scan fails
 */
public static void fullScanRegions(Connection connection,
final Visitor visitor)
throws IOException {
scanMeta(connection, null, null, QueryType.REGION, visitor);
}
/**
 * Performs a full scan of <code>hbase:meta</code> for regions.
 * @param connection connection we're using
 * @return every region row currently in meta, as raw {@link Result}s
 * @throws IOException if the underlying meta scan fails
 */
public static List<Result> fullScanRegions(Connection connection)
throws IOException {
return fullScan(connection, QueryType.REGION);
}
/**
 * Performs a full scan of <code>hbase:meta</code> for tables.
 * @param connection connection we're using
 * @param visitor Visitor invoked against each row in tables family.
 * @throws IOException if the underlying meta scan fails
 */
public static void fullScanTables(Connection connection,
final Visitor visitor)
throws IOException {
scanMeta(connection, null, null, QueryType.TABLE, visitor);
}
/**
 * Performs a full scan of <code>hbase:meta</code>.
 * @param connection connection we're using
 * @param type scanned part of meta
 * @return List of {@link Result}
 * @throws IOException if the underlying meta scan fails
 */
public static List<Result> fullScan(Connection connection, QueryType type)
    throws IOException {
  // Collect every row the unbounded scan produces and hand back the list.
  CollectAllVisitor collector = new CollectAllVisitor();
  scanMeta(connection, null, null, type, collector);
  return collector.getResults();
}
/**
 * Callers should call close on the returned {@link Table} instance.
 * @param connection connection we're using to access Meta
 * @return An {@link Table} for <code>hbase:meta</code>
 * @throws IOException if the connection is already closed
 */
public static Table getMetaHTable(final Connection connection)
    throws IOException {
  // We used to pass whole CatalogTracker in here, now we just pass in Connection
  if (connection == null) {
    throw new NullPointerException("No connection");
  }
  if (connection.isClosed()) {
    throw new IOException("connection is closed");
  }
  return connection.getTable(TableName.META_TABLE_NAME);
}
/**
 * Runs the given Get against the given table, closing the table when done.
 * @param t Table to use (will be closed when done); may be null
 * @param g Get to run
 * @return the Result of the Get, or null when <code>t</code> is null
 * @throws IOException if the Get or the close fails
 */
private static Result get(final Table t, final Get g) throws IOException {
  if (t == null) {
    return null;
  }
  // try-with-resources guarantees the table is closed even if the get throws,
  // replacing the hand-rolled try/finally.
  try (Table table = t) {
    return table.get(g);
  }
}
/**
 * Gets the region info and assignment for the specified region.
 * @param connection connection we're using
 * @param regionName Region to lookup.
 * @return Location and RegionInfo for <code>regionName</code>, or null if unknown
 * @deprecated use {@link #getRegionLocation(Connection, byte[])} instead
 */
@Deprecated
public static Pair<RegionInfo, ServerName> getRegion(Connection connection, byte [] regionName)
    throws IOException {
  // Delegate to the replacement API and unpack its result for legacy callers.
  HRegionLocation location = getRegionLocation(connection, regionName);
  if (location == null) {
    return null;
  }
  return new Pair<>(location.getRegionInfo(), location.getServerName());
}
/**
 * Returns the HRegionLocation from meta for the given region
 * @param connection connection we're using
 * @param regionName region we're looking for
 * @return HRegionLocation for the given region, or null if meta has no such row
 */
public static HRegionLocation getRegionLocation(Connection connection, byte[] regionName)
throws IOException {
byte[] row = regionName;
RegionInfo parsedInfo = null;
try {
// Normalize to the default-replica row key so replica lookups hit the right row.
parsedInfo = parseRegionInfoFromRegionName(regionName);
row = getMetaKeyForRegion(parsedInfo);
} catch (Exception parseEx) {
// Ignore. This is used with tableName passed as regionName.
}
Get get = new Get(row);
get.addFamily(HConstants.CATALOG_FAMILY);
Result r = get(getMetaHTable(connection), get);
RegionLocations locations = getRegionLocations(r);
// When the name did not parse, fall back to the default replica (id 0).
return locations == null ? null
: locations.getRegionLocation(parsedInfo == null ? 0 : parsedInfo.getReplicaId());
}
/**
 * Returns the HRegionLocation from meta for the given region
 * @param connection connection we're using
 * @param regionInfo region information
 * @return HRegionLocation for the given region
 */
public static HRegionLocation getRegionLocation(Connection connection, RegionInfo regionInfo)
    throws IOException {
  // Read the default-replica row, then extract this particular replica's location.
  Get get = new Get(getMetaKeyForRegion(regionInfo));
  get.addFamily(HConstants.CATALOG_FAMILY);
  Result result = get(getMetaHTable(connection), get);
  return getRegionLocation(result, regionInfo, regionInfo.getReplicaId());
}
/** Returns the row key to use for this regionInfo */
// All replicas of a range share one meta row, keyed by the default replica's name.
public static byte[] getMetaKeyForRegion(RegionInfo regionInfo) {
return RegionReplicaUtil.getRegionInfoForDefaultReplica(regionInfo).getRegionName();
}
/** Returns an HRI parsed from this regionName. Not all the fields of the HRI
 * is stored in the name, so the returned object should only be used for the fields
 * in the regionName.
 */
public static RegionInfo parseRegionInfoFromRegionName(byte[] regionName) throws IOException {
byte[][] fields = RegionInfo.parseRegionName(regionName);
long regionId = Long.parseLong(Bytes.toString(fields[2]));
int replicaId = fields.length > 3 ? Integer.parseInt(Bytes.toString(fields[3]), 16) : 0;
return RegionInfoBuilder.newBuilder(TableName.valueOf(fields[0]))
.setStartKey(fields[1])
// NOTE: fields[2] holds the regionId bytes, not a real end key; it is reused
// here as a placeholder because the end key is not encoded in a region name.
.setEndKey(fields[2])
.setSplit(false)
.setRegionId(regionId)
.setReplicaId(replicaId)
.build();
}
/**
 * Gets the result in hbase:meta for the specified region.
 * @param connection connection we're using
 * @param regionName region we're looking for
 * @return result of the specified region
 */
public static Result getRegionResult(Connection connection,
    byte[] regionName) throws IOException {
  // Fetch only the catalog family for this region's row.
  Get get = new Get(regionName).addFamily(HConstants.CATALOG_FAMILY);
  return get(getMetaHTable(connection), get);
}
/**
 * Get regions from the merge qualifier of the specified merged region
 * @return null if it doesn't contain merge qualifier, else two merge regions
 */
@Nullable
public static Pair<RegionInfo, RegionInfo> getRegionsFromMergeQualifier(
    Connection connection, byte[] regionName) throws IOException {
  Result result = getRegionResult(connection, regionName);
  RegionInfo parentA = getRegionInfo(result, HConstants.MERGEA_QUALIFIER);
  RegionInfo parentB = getRegionInfo(result, HConstants.MERGEB_QUALIFIER);
  // No merge columns at all means this region is not the product of a merge.
  return (parentA == null && parentB == null) ? null : new Pair<>(parentA, parentB);
}
/**
 * Checks if the specified table exists. Looks at the hbase:meta table hosted on
 * the specified server.
 * @param connection connection we're using
 * @param tableName table to check
 * @return true if the table exists in meta, false if not
 */
public static boolean tableExists(Connection connection,
    final TableName tableName)
    throws IOException {
  // hbase:meta never has a table-state row of its own; it always exists.
  if (tableName.equals(TableName.META_TABLE_NAME)) {
    return true;
  }
  return getTableState(connection, tableName) != null;
}
/**
 * Lists all of the regions currently in META.
 *
 * @param connection to connect with
 * @param excludeOfflinedSplitParents False if we are to include offlined/splitparents regions,
 *                                    true and we'll leave out offlined regions from returned list
 * @return List of all user-space regions.
 */
@VisibleForTesting
public static List<RegionInfo> getAllRegions(Connection connection,
    boolean excludeOfflinedSplitParents)
    throws IOException {
  // A null table name scans every user-space table.
  return getListOfRegionInfos(
    getTableRegionsAndLocations(connection, null, excludeOfflinedSplitParents));
}
/**
 * Gets all of the regions of the specified table. Do not use this method
 * to get meta table regions, use methods in MetaTableLocator instead.
 * @param connection connection we're using
 * @param tableName table we're looking for
 * @return Ordered list of {@link RegionInfo}.
 * @throws IOException if the meta scan fails
 */
public static List<RegionInfo> getTableRegions(Connection connection, TableName tableName)
throws IOException {
return getTableRegions(connection, tableName, false);
}
/**
 * Gets all of the regions of the specified table. Do not use this method
 * to get meta table regions, use methods in MetaTableLocator instead.
 * @param connection connection we're using
 * @param tableName table we're looking for
 * @param excludeOfflinedSplitParents If true, do not include offlined split
 *                                    parents in the return.
 * @return Ordered list of {@link RegionInfo}.
 */
public static List<RegionInfo> getTableRegions(Connection connection, TableName tableName,
    final boolean excludeOfflinedSplitParents) throws IOException {
  // Fetch (region, server) pairs then drop the server half.
  return getListOfRegionInfos(
    getTableRegionsAndLocations(connection, tableName, excludeOfflinedSplitParents));
}
/**
 * Projects a list of (region, server) pairs down to just the regions.
 * @param pairs pairs as returned by getTableRegionsAndLocations; may be null
 * @return the first element of each pair, or an empty list for null/empty input
 */
private static List<RegionInfo> getListOfRegionInfos(
    final List<Pair<RegionInfo, ServerName>> pairs) {
  if (pairs == null || pairs.isEmpty()) {
    return Collections.emptyList();
  }
  return pairs.stream().map(Pair::getFirst).collect(Collectors.toList());
}
/**
 * @param tableName table we're working with
 * @return start row for scanning META according to query type
 */
public static byte[] getTableStartRowForMeta(TableName tableName, QueryType type) {
  if (tableName == null) {
    return null;
  }
  byte[] name = tableName.getName();
  if (type == QueryType.REGION) {
    // Region rows look like "<tableName>,<startKey>,..."; appending two
    // delimiters positions the scan at this table's first region row.
    byte[] startRow = Arrays.copyOf(name, name.length + 2);
    startRow[name.length] = HConstants.DELIMITER;
    startRow[name.length + 1] = HConstants.DELIMITER;
    return startRow;
  }
  // ALL / TABLE (and any other type): table rows are keyed by the bare name.
  return name;
}
/**
 * @param tableName table we're working with
 * @return stop row for scanning META according to query type
 */
public static byte[] getTableStopRowForMeta(TableName tableName, QueryType type) {
  if (tableName == null) {
    return null;
  }
  byte[] name = tableName.getName();
  if (type == QueryType.REGION) {
    // "<tableName>' ',<DELIM><DELIM>" sorts just past every region row of this
    // table (' ' is the byte appended after the name, then the two delimiters).
    byte[] stopRow = Arrays.copyOf(name, name.length + 3);
    stopRow[name.length] = ' ';
    stopRow[name.length + 1] = HConstants.DELIMITER;
    stopRow[name.length + 2] = HConstants.DELIMITER;
    return stopRow;
  }
  // ALL / TABLE: stop just past the bare table-name row.
  byte[] stopRow = Arrays.copyOf(name, name.length + 1);
  stopRow[name.length] = ' ';
  return stopRow;
}
/**
 * This method creates a Scan object that will only scan catalog rows that
 * belong to the specified table. It doesn't specify any columns.
 * This is a better alternative to just using a start row and scan until
 * it hits a new table since that requires parsing the HRI to get the table
 * name.
 * @param tableName bytes of table's name
 * @return configured Scan object
 */
@Deprecated
public static Scan getScanForTableName(Connection connection, TableName tableName) {
  // Start key is just the table name with delimiters
  byte[] startKey = getTableStartRowForMeta(tableName, QueryType.REGION);
  // Stop key appends the smallest possible char to the table name
  byte[] stopKey = getTableStopRowForMeta(tableName, QueryType.REGION);
  Scan scan = getMetaScan(connection, -1);
  // Use withStartRow/withStopRow (same inclusive-start / exclusive-stop
  // semantics) instead of the deprecated setStartRow/setStopRow, matching the
  // rest of this class (see scanMeta).
  scan.withStartRow(startKey);
  scan.withStopRow(stopKey);
  return scan;
}
/**
 * Builds a Scan for hbase:meta honoring the connection's scanner-caching and
 * meta-replica settings.
 * @param rowUpperLimit positive value caps the row count and forces PREAD
 */
private static Scan getMetaScan(Connection connection, int rowUpperLimit) {
  Scan scan = new Scan();
  Configuration conf = connection.getConfiguration();
  // Respect the configured meta scanner caching.
  scan.setCaching(conf.getInt(HConstants.HBASE_META_SCANNER_CACHING,
    HConstants.DEFAULT_HBASE_META_SCANNER_CACHING));
  // Allow timeline-consistent reads from meta replicas when enabled.
  if (conf.getBoolean(HConstants.USE_META_REPLICAS,
      HConstants.DEFAULT_USE_META_REPLICAS)) {
    scan.setConsistency(Consistency.TIMELINE);
  }
  if (rowUpperLimit > 0) {
    scan.setLimit(rowUpperLimit);
    scan.setReadType(Scan.ReadType.PREAD);
  }
  return scan;
}
/**
 * Do not use this method to get meta table regions, use methods in MetaTableLocator instead.
 * @param connection connection we're using
 * @param tableName table we're looking for
 * @return Return list of regioninfos and server.
 * @throws IOException if the meta scan fails
 */
public static List<Pair<RegionInfo, ServerName>>
getTableRegionsAndLocations(Connection connection, TableName tableName)
throws IOException {
return getTableRegionsAndLocations(connection, tableName, true);
}
/**
 * Do not use this method to get meta table regions, use methods in MetaTableLocator instead.
 * @param connection connection we're using
 * @param tableName table to work with, can be null for getting all regions
 * @param excludeOfflinedSplitParents don't return split parents
 * @return Return list of regioninfos and server addresses.
 * @throws IOException if the meta scan fails
 */
public static List<Pair<RegionInfo, ServerName>> getTableRegionsAndLocations(
Connection connection, @Nullable final TableName tableName,
final boolean excludeOfflinedSplitParents) throws IOException {
if (tableName != null && tableName.equals(TableName.META_TABLE_NAME)) {
throw new IOException("This method can't be used to locate meta regions;"
+ " use MetaTableLocator instead");
}
// Make a version of CollectingVisitor that collects RegionInfo and ServerAddress
CollectingVisitor<Pair<RegionInfo, ServerName>> visitor =
new CollectingVisitor<Pair<RegionInfo, ServerName>>() {
// Locations parsed by visit(); consumed by add() for the same row.
private RegionLocations current = null;
@Override
public boolean visit(Result r) throws IOException {
current = getRegionLocations(r);
if (current == null || current.getRegionLocation().getRegion() == null) {
LOG.warn("No serialized RegionInfo in " + r);
return true;
}
RegionInfo hri = current.getRegionLocation().getRegion();
if (excludeOfflinedSplitParents && hri.isSplitParent()) return true;
// Else call super and add this Result to the collection.
return super.visit(r);
}
@Override
void add(Result r) {
// add() is only invoked via super.visit(), so `current` matches this row.
if (current == null) {
return;
}
// One output pair per non-null replica location on this row.
for (HRegionLocation loc : current.getRegionLocations()) {
if (loc != null) {
this.results.add(new Pair<>(loc.getRegion(), loc.getServerName()));
}
}
}
};
scanMeta(connection,
getTableStartRowForMeta(tableName, QueryType.REGION),
getTableStopRowForMeta(tableName, QueryType.REGION),
QueryType.REGION, visitor);
return visitor.getResults();
}
/**
 * @param connection connection we're using
 * @param serverName server whose regions we're interested in
 * @return List of user regions installed on this server (does not include
 *         catalog regions).
 * @throws IOException if the meta scan fails
 */
public static NavigableMap<RegionInfo, Result>
getServerUserRegions(Connection connection, final ServerName serverName)
    throws IOException {
  final NavigableMap<RegionInfo, Result> regions = new TreeMap<>();
  // Walk every region row, keeping only replicas hosted by the given server.
  CollectingVisitor<Result> visitor = new CollectingVisitor<Result>() {
    @Override
    void add(Result r) {
      if (r == null || r.isEmpty()) {
        return;
      }
      RegionLocations locations = getRegionLocations(r);
      if (locations == null) {
        return;
      }
      for (HRegionLocation loc : locations.getRegionLocations()) {
        if (loc == null) {
          continue;
        }
        ServerName host = loc.getServerName();
        if (host != null && host.equals(serverName)) {
          regions.put(loc.getRegion(), r);
        }
      }
    }
  };
  scanMeta(connection, null, null, QueryType.REGION, visitor);
  return regions;
}
/**
 * Scans all of hbase:meta and logs each row: table-state rows as their state,
 * region rows as their RegionInfo. Intended for debugging.
 */
public static void fullScanMetaAndPrint(Connection connection)
    throws IOException {
  Visitor printer = new Visitor() {
    @Override
    public boolean visit(Result r) throws IOException {
      if (r == null || r.isEmpty()) {
        return true;
      }
      LOG.info("fullScanMetaAndPrint.Current Meta Row: " + r);
      // A table row carries state; otherwise treat it as a region row.
      TableState state = getTableState(r);
      if (state != null) {
        LOG.info("Table State: " + state);
        return true;
      }
      RegionLocations locations = getRegionLocations(r);
      if (locations == null) {
        return true;
      }
      for (HRegionLocation loc : locations.getRegionLocations()) {
        if (loc != null) {
          LOG.info("fullScanMetaAndPrint.HRI Print= " + loc.getRegion());
        }
      }
      return true;
    }
  };
  scanMeta(connection, null, null, QueryType.ALL, printer);
}
/**
 * Scans every region row of the given table, invoking the visitor per row.
 */
public static void scanMetaForTableRegions(Connection connection, Visitor visitor,
TableName tableName) throws IOException {
scanMeta(connection, tableName, QueryType.REGION, Integer.MAX_VALUE, visitor);
}
/**
 * Scans meta for the given table (derives start/stop rows from the table name)
 * with an upper bound on the number of rows visited.
 */
public static void scanMeta(Connection connection, TableName table, QueryType type, int maxRows,
final Visitor visitor) throws IOException {
scanMeta(connection, getTableStartRowForMeta(table, type), getTableStopRowForMeta(table, type),
type, maxRows, visitor);
}
/**
 * Scans meta between the given rows with no row-count limit.
 */
public static void scanMeta(Connection connection, @Nullable final byte[] startRow,
@Nullable final byte[] stopRow, QueryType type, final Visitor visitor) throws IOException {
scanMeta(connection, startRow, stopRow, type, Integer.MAX_VALUE, visitor);
}
/**
 * Performs a scan of META table for given table starting from
 * given row.
 *
 * @param connection connection we're using
 * @param visitor visitor to call
 * @param tableName table within which we scan
 * @param row start scan from this row
 * @param rowLimit max number of rows to return
 */
public static void scanMeta(Connection connection, final Visitor visitor,
final TableName tableName, final byte[] row, final int rowLimit) throws IOException {
byte[] startRow = null;
byte[] stopRow = null;
if (tableName != null) {
startRow = getTableStartRowForMeta(tableName, QueryType.REGION);
if (row != null) {
// Rewind to the start key of the region containing `row` so that the scan
// begins at that region's meta row rather than mid-table.
RegionInfo closestRi = getClosestRegionInfo(connection, tableName, row);
startRow =
RegionInfo.createRegionName(tableName, closestRi.getStartKey(), HConstants.ZEROES, false);
}
stopRow = getTableStopRowForMeta(tableName, QueryType.REGION);
}
scanMeta(connection, startRow, stopRow, QueryType.REGION, rowLimit, visitor);
}
/**
 * Performs a scan of META table.
 * @param connection connection we're using
 * @param startRow Where to start the scan. Pass null if want to begin scan
 *                 at first row.
 * @param stopRow Where to stop the scan. Pass null if want to scan all rows
 *                from the start one
 * @param type scanned part of meta
 * @param maxRows maximum rows to return
 * @param visitor Visitor invoked against each row.
 * @throws IOException if the scan fails
 */
public static void scanMeta(Connection connection, @Nullable final byte[] startRow,
@Nullable final byte[] stopRow, QueryType type, int maxRows, final Visitor visitor)
throws IOException {
scanMeta(connection, startRow, stopRow, type, null, maxRows, visitor);
}
/**
 * Core meta scan: applies families for the query type, optional row bounds and
 * filter, then feeds each non-empty row to the visitor until it returns false
 * or maxRows is reached. A {@link Closeable} visitor is always closed, even
 * when the scan throws (previously it leaked on error).
 */
private static void scanMeta(Connection connection, @Nullable final byte[] startRow,
    @Nullable final byte[] stopRow, QueryType type, @Nullable Filter filter, int maxRows,
    final Visitor visitor) throws IOException {
  int rowUpperLimit = maxRows > 0 ? maxRows : Integer.MAX_VALUE;
  Scan scan = getMetaScan(connection, rowUpperLimit);
  for (byte[] family : type.getFamilies()) {
    scan.addFamily(family);
  }
  if (startRow != null) {
    scan.withStartRow(startRow);
  }
  if (stopRow != null) {
    scan.withStopRow(stopRow);
  }
  if (filter != null) {
    scan.setFilter(filter);
  }
  if (LOG.isTraceEnabled()) {
    LOG.trace("Scanning META" + " starting at row=" + Bytes.toStringBinary(startRow) +
      " stopping at row=" + Bytes.toStringBinary(stopRow) + " for max=" + rowUpperLimit +
      " with caching=" + scan.getCaching());
  }
  int currentRow = 0;
  try {
    try (Table metaTable = getMetaHTable(connection)) {
      try (ResultScanner scanner = metaTable.getScanner(scan)) {
        Result data;
        while ((data = scanner.next()) != null) {
          if (data.isEmpty()) continue;
          // Break if visit returns false.
          if (!visitor.visit(data)) break;
          if (++currentRow >= rowUpperLimit) break;
        }
      }
    }
  } finally {
    // Close the visitor even if the scan threw; instanceof is null-safe.
    if (visitor instanceof Closeable) {
      try {
        ((Closeable) visitor).close();
      } catch (Throwable t) {
        ExceptionUtil.rethrowIfInterrupt(t);
        LOG.debug("Got exception in closing the meta scanner visitor", t);
      }
    }
  }
}
/**
 * @return Get closest metatable region row to passed <code>row</code>
 */
@NonNull
private static RegionInfo getClosestRegionInfo(Connection connection,
    @NonNull final TableName tableName, @NonNull final byte[] row) throws IOException {
  // Reverse-scan one row starting from the largest region name that could
  // contain `row`, yielding the closest preceding meta entry.
  byte[] searchRow = RegionInfo.createRegionName(tableName, row, HConstants.NINES, false);
  Scan scan = getMetaScan(connection, 1);
  scan.setReversed(true);
  scan.withStartRow(searchRow);
  // NOTE(review): the meta Table instance is not closed here -- matches the
  // historical behavior of this method.
  try (ResultScanner scanner = getMetaHTable(connection).getScanner(scan)) {
    Result result = scanner.next();
    if (result == null) {
      throw new TableNotFoundException("Cannot find row in META " +
        " for table: " + tableName + ", row=" + Bytes.toStringBinary(row));
    }
    RegionInfo regionInfo = getRegionInfo(result);
    if (regionInfo == null) {
      throw new IOException("RegionInfo was null or empty in Meta for " +
        tableName + ", row=" + Bytes.toStringBinary(row));
    }
    return regionInfo;
  }
}
/**
 * Returns the column family used for meta columns.
 * @return HConstants.CATALOG_FAMILY.
 */
private static byte[] getCatalogFamily() {
return HConstants.CATALOG_FAMILY;
}
/**
 * Returns the column family used for table columns.
 * @return HConstants.TABLE_FAMILY.
 */
private static byte[] getTableFamily() {
return HConstants.TABLE_FAMILY;
}
/**
 * Returns the column qualifier for serialized region info
 * @return HConstants.REGIONINFO_QUALIFIER
 */
private static byte[] getRegionInfoColumn() {
return HConstants.REGIONINFO_QUALIFIER;
}
/**
 * Returns the column qualifier for serialized table state
 * @return HConstants.TABLE_STATE_QUALIFIER
 */
private static byte[] getTableStateColumn() {
return HConstants.TABLE_STATE_QUALIFIER;
}
/**
 * Returns the column qualifier for serialized region state
 * (default replica; see {@link #getRegionStateColumn(int)} for other replicas)
 * @return HConstants.STATE_QUALIFIER
 */
private static byte[] getRegionStateColumn() {
return HConstants.STATE_QUALIFIER;
}
/**
 * Returns the column qualifier for serialized region state
 * @param replicaId the replicaId of the region
 * @return a byte[] for state qualifier
 */
@VisibleForTesting
public static byte[] getRegionStateColumn(int replicaId) {
  if (replicaId == 0) {
    return HConstants.STATE_QUALIFIER;
  }
  // Non-default replicas append "_<formatted replicaId>" to the qualifier.
  return Bytes.toBytes(HConstants.STATE_QUALIFIER_STR + META_REPLICA_ID_DELIMITER
    + String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId));
}
/**
 * Returns the column qualifier for serialized region state
 * @param replicaId the replicaId of the region
 * @return a byte[] for sn column qualifier
 */
@VisibleForTesting
public static byte[] getServerNameColumn(int replicaId) {
  if (replicaId == 0) {
    return HConstants.SERVERNAME_QUALIFIER;
  }
  // Non-default replicas append "_<formatted replicaId>" to the qualifier.
  return Bytes.toBytes(HConstants.SERVERNAME_QUALIFIER_STR + META_REPLICA_ID_DELIMITER
    + String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId));
}
/**
 * Returns the column qualifier for server column for replicaId
 * @param replicaId the replicaId of the region
 * @return a byte[] for server column qualifier
 */
@VisibleForTesting
public static byte[] getServerColumn(int replicaId) {
  if (replicaId == 0) {
    return HConstants.SERVER_QUALIFIER;
  }
  // Non-default replicas append "_<formatted replicaId>" to the qualifier.
  return Bytes.toBytes(HConstants.SERVER_QUALIFIER_STR + META_REPLICA_ID_DELIMITER
    + String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId));
}
/**
 * Returns the column qualifier for server start code column for replicaId
 * @param replicaId the replicaId of the region
 * @return a byte[] for server start code column qualifier
 */
@VisibleForTesting
public static byte[] getStartCodeColumn(int replicaId) {
  if (replicaId == 0) {
    return HConstants.STARTCODE_QUALIFIER;
  }
  // Non-default replicas append "_<formatted replicaId>" to the qualifier.
  return Bytes.toBytes(HConstants.STARTCODE_QUALIFIER_STR + META_REPLICA_ID_DELIMITER
    + String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId));
}
/**
 * Returns the column qualifier for seqNum column for replicaId
 * @param replicaId the replicaId of the region
 * @return a byte[] for seqNum column qualifier
 */
@VisibleForTesting
public static byte[] getSeqNumColumn(int replicaId) {
  if (replicaId == 0) {
    return HConstants.SEQNUM_QUALIFIER;
  }
  // Non-default replicas append "_<formatted replicaId>" to the qualifier.
  return Bytes.toBytes(HConstants.SEQNUM_QUALIFIER_STR + META_REPLICA_ID_DELIMITER
    + String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId));
}
/**
 * Parses the replicaId from the server column qualifier. See top of the class javadoc
 * for the actual meta layout
 * @param serverColumn the column qualifier
 * @return the replicaId, 0 for the default replica, or -1 if this is not a
 *         server column at all
 */
@VisibleForTesting
static int parseReplicaIdFromServerColumn(byte[] serverColumn) {
  Matcher matcher = SERVER_COLUMN_PATTERN.matcher(Bytes.toString(serverColumn));
  if (!matcher.matches() || matcher.groupCount() == 0) {
    return -1;
  }
  String suffix = matcher.group(1);
  // No "_xxxx" suffix means the default replica.
  if (suffix == null || suffix.isEmpty()) {
    return 0;
  }
  // Skip the leading delimiter and parse the hex replica id.
  return Integer.parseInt(suffix.substring(1), 16);
}
/**
 * Returns a {@link ServerName} from catalog table {@link Result}.
 * @param r Result to pull from
 * @return A ServerName instance or null if necessary fields not found or empty.
 */
@Nullable
@InterfaceAudience.Private // for use by HMaster#getTableRegionRow which is used for testing only
public static ServerName getServerName(final Result r, final int replicaId) {
  Cell serverCell = r.getColumnLatestCell(getCatalogFamily(), getServerColumn(replicaId));
  if (serverCell == null || serverCell.getValueLength() == 0) {
    return null;
  }
  String hostAndPort = Bytes.toString(serverCell.getValueArray(),
    serverCell.getValueOffset(), serverCell.getValueLength());
  Cell startcodeCell = r.getColumnLatestCell(getCatalogFamily(), getStartCodeColumn(replicaId));
  if (startcodeCell == null || startcodeCell.getValueLength() == 0) {
    return null;
  }
  try {
    long startcode = Bytes.toLong(startcodeCell.getValueArray(),
      startcodeCell.getValueOffset(), startcodeCell.getValueLength());
    return ServerName.valueOf(hostAndPort, startcode);
  } catch (IllegalArgumentException e) {
    // Malformed host:port or startcode; treat as "no server recorded".
    LOG.error("Ignoring invalid region for server " + hostAndPort + "; cell=" + startcodeCell, e);
    return null;
  }
}
/**
* The latest seqnum that the server writing to meta observed when opening the region.
* E.g. the seqNum when the result of {@link #getServerName(Result, int)} was written.
* @param r Result to pull the seqNum from
* @return SeqNum, or HConstants.NO_SEQNUM if there's no value written.
*/
private static long getSeqNumDuringOpen(final Result r, final int replicaId) {
Cell cell = r.getColumnLatestCell(getCatalogFamily(), getSeqNumColumn(replicaId));
if (cell == null || cell.getValueLength() == 0) return HConstants.NO_SEQNUM;
return Bytes.toLong(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
}
  /**
   * Returns an HRegionLocationList extracted from the result.
   * @return an HRegionLocationList containing all locations for the region range or null if
   *   we can't deserialize the result.
   */
  @Nullable
  public static RegionLocations getRegionLocations(final Result r) {
    if (r == null) return null;
    RegionInfo regionInfo = getRegionInfo(r, getRegionInfoColumn());
    if (regionInfo == null) return null;
    List<HRegionLocation> locations = new ArrayList<>(1);
    NavigableMap<byte[],NavigableMap<byte[],byte[]>> familyMap = r.getNoVersionMap();
    // The default replica (id 0) is always added first.
    locations.add(getRegionLocation(r, regionInfo, 0));
    NavigableMap<byte[], byte[]> infoMap = familyMap.get(getCatalogFamily());
    if (infoMap == null) return new RegionLocations(locations);
    // iterate until all serverName columns are seen
    int replicaId = 0;
    byte[] serverColumn = getServerColumn(replicaId);
    SortedMap<byte[], byte[]> serverMap = null;
    // All replica server columns come after the default server column in the map's sort order.
    serverMap = infoMap.tailMap(serverColumn, false);
    if (serverMap.isEmpty()) return new RegionLocations(locations);
    for (Map.Entry<byte[], byte[]> entry : serverMap.entrySet()) {
      replicaId = parseReplicaIdFromServerColumn(entry.getKey());
      if (replicaId < 0) {
        // Not a server column: past the server-column range in the sorted map, so stop.
        break;
      }
      HRegionLocation location = getRegionLocation(r, regionInfo, replicaId);
      // In case the region replica is newly created, its location might be null. We usually do not
      // have HRL's in RegionLocations object with null ServerName. They are handled as null HRLs.
      if (location == null || location.getServerName() == null) {
        locations.add(null);
      } else {
        locations.add(location);
      }
    }
    return new RegionLocations(locations);
  }
/**
* Returns the HRegionLocation parsed from the given meta row Result
* for the given regionInfo and replicaId. The regionInfo can be the default region info
* for the replica.
* @param r the meta row result
* @param regionInfo RegionInfo for default replica
* @param replicaId the replicaId for the HRegionLocation
* @return HRegionLocation parsed from the given meta row Result for the given replicaId
*/
private static HRegionLocation getRegionLocation(final Result r, final RegionInfo regionInfo,
final int replicaId) {
ServerName serverName = getServerName(r, replicaId);
long seqNum = getSeqNumDuringOpen(r, replicaId);
RegionInfo replicaInfo = RegionReplicaUtil.getRegionInfoForReplica(regionInfo, replicaId);
return new HRegionLocation(replicaInfo, serverName, seqNum);
}
/**
* Returns RegionInfo object from the column
* HConstants.CATALOG_FAMILY:HConstants.REGIONINFO_QUALIFIER of the catalog
* table Result.
* @param data a Result object from the catalog table scan
* @return RegionInfo or null
*/
public static RegionInfo getRegionInfo(Result data) {
return getRegionInfo(data, HConstants.REGIONINFO_QUALIFIER);
}
/**
* Returns the RegionInfo object from the column {@link HConstants#CATALOG_FAMILY} and
* <code>qualifier</code> of the catalog table result.
* @param r a Result object from the catalog table scan
* @param qualifier Column family qualifier
* @return An RegionInfo instance or null.
*/
@Nullable
private static RegionInfo getRegionInfo(final Result r, byte [] qualifier) {
Cell cell = r.getColumnLatestCell(getCatalogFamily(), qualifier);
if (cell == null) return null;
return RegionInfo.parseFromOrNull(cell.getValueArray(),
cell.getValueOffset(), cell.getValueLength());
}
/**
* Returns the daughter regions by reading the corresponding columns of the catalog table
* Result.
* @param data a Result object from the catalog table scan
* @return a pair of RegionInfo or PairOfSameType(null, null) if the region is not a split
* parent
*/
public static PairOfSameType<RegionInfo> getDaughterRegions(Result data) {
RegionInfo splitA = getRegionInfo(data, HConstants.SPLITA_QUALIFIER);
RegionInfo splitB = getRegionInfo(data, HConstants.SPLITB_QUALIFIER);
return new PairOfSameType<>(splitA, splitB);
}
  /**
   * Returns the merge regions by reading the corresponding columns of the catalog table
   * Result.
   * @param data a Result object from the catalog table scan
   * @return a pair of RegionInfo or PairOfSameType(null, null) if the region is not the
   *   result of a merge
   */
  public static PairOfSameType<RegionInfo> getMergeRegions(Result data) {
    RegionInfo mergeA = getRegionInfo(data, HConstants.MERGEA_QUALIFIER);
    RegionInfo mergeB = getRegionInfo(data, HConstants.MERGEB_QUALIFIER);
    return new PairOfSameType<>(mergeA, mergeB);
  }
/**
* Fetch table state for given table from META table
* @param conn connection to use
* @param tableName table to fetch state for
* @return state
* @throws IOException
*/
@Nullable
public static TableState getTableState(Connection conn, TableName tableName)
throws IOException {
if (tableName.equals(TableName.META_TABLE_NAME)) {
return new TableState(tableName, TableState.State.ENABLED);
}
Table metaHTable = getMetaHTable(conn);
Get get = new Get(tableName.getName()).addColumn(getTableFamily(), getTableStateColumn());
Result result = metaHTable.get(get);
return getTableState(result);
}
  /**
   * Fetch table states from META table
   * @param conn connection to use
   * @return map {tableName -> state}
   * @throws IOException if the scan of the meta table fails
   */
  public static Map<TableName, TableState> getTableStates(Connection conn)
      throws IOException {
    // LinkedHashMap keeps entries in scan order.
    final Map<TableName, TableState> states = new LinkedHashMap<>();
    Visitor collector = new Visitor() {
      @Override
      public boolean visit(Result r) throws IOException {
        TableState state = getTableState(r);
        // Rows without a table state cell are skipped.
        if (state != null)
          states.put(state.getTableName(), state);
        return true;
      }
    };
    fullScanTables(conn, collector);
    return states;
  }
  /**
   * Updates state in META
   * @param conn connection to use
   * @param tableName table to look for
   * @param actual the new state to record for the table
   * @throws IOException if the meta update fails
   */
  public static void updateTableState(Connection conn, TableName tableName,
      TableState.State actual) throws IOException {
    updateTableState(conn, new TableState(tableName, actual));
  }
  /**
   * Decode table state from META Result.
   * Should contain cell from HConstants.TABLE_FAMILY
   * @param r result
   * @return null if not found
   * @throws IOException if the state cell cannot be deserialized
   */
  @Nullable
  public static TableState getTableState(Result r) throws IOException {
    Cell cell = r.getColumnLatestCell(getTableFamily(), getTableStateColumn());
    if (cell == null) {
      return null;
    }
    try {
      // The row key of a table state row is the table name itself.
      return TableState.parseFrom(TableName.valueOf(r.getRow()),
        Arrays.copyOfRange(cell.getValueArray(), cell.getValueOffset(),
          cell.getValueOffset() + cell.getValueLength()));
    } catch (DeserializationException e) {
      // Surface corrupt state cells as IOException per this class's error convention.
      throw new IOException(e);
    }
  }
  /**
   * Implementations 'visit' a catalog table row.
   */
  public interface Visitor {
    /**
     * Visit the catalog table row.
     * @param r A row from catalog table
     * @return True if we are to proceed scanning the table, else false if
     *   we are to stop now.
     * @throws IOException if processing the row fails
     */
    boolean visit(final Result r) throws IOException;
  }
  /**
   * Implementations 'visit' a catalog table row but with close() at the end.
   * Useful when the visitor holds resources that must be released after the scan.
   */
  public interface CloseableVisitor extends Visitor, Closeable {
  }
  /**
   * A {@link Visitor} that collects content out of passed {@link Result}.
   * Subclasses define what is extracted from each row via {@link #add(Result)}.
   */
  static abstract class CollectingVisitor<T> implements Visitor {
    // Accumulates whatever add() extracts from each visited row.
    final List<T> results = new ArrayList<>();
    @Override
    public boolean visit(Result r) throws IOException {
      // Null/empty rows are skipped; the scan always continues.
      if (r != null && !r.isEmpty()) {
        add(r);
      }
      return true;
    }
    /** Extract the item of interest from the (non-empty) row and store it. */
    abstract void add(Result r);
    /**
     * @return Collected results; wait till visits complete to collect all
     *   possible results
     */
    List<T> getResults() {
      return this.results;
    }
  }
  /**
   * Collects all returned.
   * Stores each visited {@link Result} as-is, without any extraction.
   */
  static class CollectAllVisitor extends CollectingVisitor<Result> {
    @Override
    void add(Result r) {
      this.results.add(r);
    }
  }
  /**
   * A Visitor that skips offline regions and split parents
   */
  public static abstract class DefaultVisitorBase implements Visitor {
    public DefaultVisitorBase() {
      super();
    }
    /**
     * Invoked only for rows whose region is online and not a split parent;
     * see the filtering in {@link #visit(Result)}.
     */
    public abstract boolean visitInternal(Result rowResult) throws IOException;
    @Override
    public boolean visit(Result rowResult) throws IOException {
      RegionInfo info = getRegionInfo(rowResult);
      if (info == null) {
        // Not a deserializable region row; keep scanning.
        return true;
      }
      //skip over offline and split regions
      if (!(info.isOffline() || info.isSplit())) {
        return visitInternal(rowResult);
      }
      return true;
    }
  }
  /**
   * A Visitor for a table. Provides a consistent view of the table's
   * hbase:meta entries during concurrent splits (see HBASE-5986 for details). This class
   * does not guarantee ordered traversal of meta entries, and can block until the
   * hbase:meta entries for daughters are available during splits.
   */
  public static abstract class TableVisitorBase extends DefaultVisitorBase {
    // Only rows belonging to this table are visited.
    private TableName tableName;
    public TableVisitorBase(TableName tableName) {
      super();
      this.tableName = tableName;
    }
    @Override
    public final boolean visit(Result rowResult) throws IOException {
      RegionInfo info = getRegionInfo(rowResult);
      if (info == null) {
        return true;
      }
      if (!(info.getTable().equals(tableName))) {
        // Returning false stops the scan once a row of a different table is reached.
        return false;
      }
      return super.visit(rowResult);
    }
  }
  /**
   * Count regions in <code>hbase:meta</code> for passed table.
   * Opens (and closes) a short-lived connection from the passed configuration.
   * @param c Configuration object
   * @param tableName table name to count regions for
   * @return Count of regions in table <code>tableName</code>
   */
  public static int getRegionCount(final Configuration c, final TableName tableName)
      throws IOException {
    try (Connection connection = ConnectionFactory.createConnection(c)) {
      return getRegionCount(connection, tableName);
    }
  }
  /**
   * Count regions in <code>hbase:meta</code> for passed table.
   * @param connection Connection object
   * @param tableName table name to count regions for
   * @return Count of regions in table <code>tableName</code>
   */
  public static int getRegionCount(final Connection connection, final TableName tableName)
      throws IOException {
    try (RegionLocator locator = connection.getRegionLocator(tableName)) {
      List<HRegionLocation> locations = locator.getAllRegionLocations();
      return locations == null ? 0 : locations.size();
    }
  }
////////////////////////
// Editing operations //
////////////////////////
  /**
   * Generates and returns a Put containing the region info for the catalog table,
   * keyed on the region name with the supplied timestamp.
   */
  public static Put makePutFromRegionInfo(RegionInfo regionInfo, long ts) throws IOException {
    Put put = new Put(regionInfo.getRegionName(), ts);
    addRegionInfo(put, regionInfo);
    return put;
  }
/**
* Generates and returns a Delete containing the region info for the catalog
* table
*/
private static Delete makeDeleteFromRegionInfo(RegionInfo regionInfo, long ts) {
if (regionInfo == null) {
throw new IllegalArgumentException("Can't make a delete for null region");
}
Delete delete = new Delete(regionInfo.getRegionName());
delete.addFamily(getCatalogFamily(), ts);
return delete;
}
/**
* Adds split daughters to the Put
*/
public static Put addDaughtersToPut(Put put, RegionInfo splitA, RegionInfo splitB)
throws IOException {
if (splitA != null) {
put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(put.getRow())
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.SPLITA_QUALIFIER)
.setTimestamp(put.getTimestamp())
.setType(Type.Put)
.setValue(RegionInfo.toByteArray(splitA))
.build());
}
if (splitB != null) {
put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(put.getRow())
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.SPLITB_QUALIFIER)
.setTimestamp(put.getTimestamp())
.setType(Type.Put)
.setValue(RegionInfo.toByteArray(splitB))
.build());
}
return put;
}
/**
* Put the passed <code>p</code> to the <code>hbase:meta</code> table.
* @param connection connection we're using
* @param p Put to add to hbase:meta
*/
private static void putToMetaTable(Connection connection, Put p) throws IOException {
try (Table table = getMetaHTable(connection)) {
put(table, p);
}
}
  /**
   * Applies the Put to the given table, logging the mutation at debug level first.
   * @param t Table to use
   * @param p put to make
   */
  private static void put(Table t, Put p) throws IOException {
    debugLogMutation(p);
    t.put(p);
  }
/**
* Put the passed <code>ps</code> to the <code>hbase:meta</code> table.
* @param connection connection we're using
* @param ps Put to add to hbase:meta
*/
public static void putsToMetaTable(final Connection connection, final List<Put> ps)
throws IOException {
if (ps.isEmpty()) {
return;
}
try (Table t = getMetaHTable(connection)) {
debugLogMutations(ps);
// the implementation for putting a single Put is much simpler so here we do a check first.
if (ps.size() == 1) {
t.put(ps.get(0));
} else {
t.put(ps);
}
}
}
  /**
   * Delete the passed <code>d</code> from the <code>hbase:meta</code> table.
   * @param connection connection we're using
   * @param d Delete to add to hbase:meta
   */
  private static void deleteFromMetaTable(final Connection connection, final Delete d)
      throws IOException {
    // Deliberately a mutable ArrayList (not Collections.singletonList): the list
    // overload requires a list that supports #remove.
    List<Delete> dels = new ArrayList<>(1);
    dels.add(d);
    deleteFromMetaTable(connection, dels);
  }
  /**
   * Delete the passed <code>deletes</code> from the <code>hbase:meta</code> table.
   * @param connection connection we're using
   * @param deletes Deletes to add to hbase:meta This list should support #remove.
   */
  private static void deleteFromMetaTable(final Connection connection, final List<Delete> deletes)
      throws IOException {
    try (Table t = getMetaHTable(connection)) {
      debugLogMutations(deletes);
      t.delete(deletes);
    }
  }
  /**
   * Deletes some replica columns corresponding to replicas for the passed rows
   * @param metaRows rows in hbase:meta
   * @param replicaIndexToDeleteFrom the replica ID we would start deleting from
   * @param numReplicasToRemove how many replicas to remove
   * @param connection connection we're using to access meta table
   */
  public static void removeRegionReplicasFromMeta(Set<byte[]> metaRows,
    int replicaIndexToDeleteFrom, int numReplicasToRemove, Connection connection)
      throws IOException {
    // Delete replica ids in [replicaIndexToDeleteFrom, absoluteIndex).
    int absoluteIndex = replicaIndexToDeleteFrom + numReplicasToRemove;
    for (byte[] row : metaRows) {
      // Fresh timestamp per row so each delete masks everything written up to now.
      long now = EnvironmentEdgeManager.currentTime();
      Delete deleteReplicaLocations = new Delete(row);
      for (int i = replicaIndexToDeleteFrom; i < absoluteIndex; i++) {
        // Remove every per-replica column: server, seqNum, startcode, serverName, state.
        deleteReplicaLocations.addColumns(getCatalogFamily(),
          getServerColumn(i), now);
        deleteReplicaLocations.addColumns(getCatalogFamily(),
          getSeqNumColumn(i), now);
        deleteReplicaLocations.addColumns(getCatalogFamily(),
          getStartCodeColumn(i), now);
        deleteReplicaLocations.addColumns(getCatalogFamily(), getServerNameColumn(i), now);
        deleteReplicaLocations.addColumns(getCatalogFamily(), getRegionStateColumn(i), now);
      }
      deleteFromMetaTable(connection, deleteReplicaLocations);
    }
  }
/**
* Execute the passed <code>mutations</code> against <code>hbase:meta</code> table.
* @param connection connection we're using
* @param mutations Puts and Deletes to execute on hbase:meta
* @throws IOException
*/
public static void mutateMetaTable(final Connection connection,
final List<Mutation> mutations)
throws IOException {
Table t = getMetaHTable(connection);
try {
debugLogMutations(mutations);
t.batch(mutations, null);
} catch (InterruptedException e) {
InterruptedIOException ie = new InterruptedIOException(e.getMessage());
ie.initCause(e);
throw ie;
} finally {
t.close();
}
}
  // Records the given region state (as its enum name string) under the region state
  // qualifier in the catalog family, using the Put's own row and timestamp.
  private static void addRegionStateToPut(Put put, RegionState.State state) throws IOException {
    put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
      .setRow(put.getRow())
      .setFamily(HConstants.CATALOG_FAMILY)
      .setQualifier(getRegionStateColumn())
      .setTimestamp(put.getTimestamp())
      .setType(Cell.Type.Put)
      .setValue(Bytes.toBytes(state.name()))
      .build());
  }
/**
* Adds daughter region infos to hbase:meta row for the specified region. Note that this does not
* add its daughter's as different rows, but adds information about the daughters in the same row
* as the parent. Use
* {@link #splitRegion(Connection, RegionInfo, long, RegionInfo, RegionInfo, ServerName, int)}
* if you want to do that.
* @param connection connection we're using
* @param regionInfo RegionInfo of parent region
* @param splitA first split daughter of the parent regionInfo
* @param splitB second split daughter of the parent regionInfo
* @throws IOException if problem connecting or updating meta
*/
public static void addSplitsToParent(Connection connection, RegionInfo regionInfo,
RegionInfo splitA, RegionInfo splitB) throws IOException {
Table meta = getMetaHTable(connection);
try {
Put put = makePutFromRegionInfo(regionInfo, EnvironmentEdgeManager.currentTime());
addDaughtersToPut(put, splitA, splitB);
meta.put(put);
debugLogMutation(put);
LOG.debug("Added region {}", regionInfo.getRegionNameAsString());
} finally {
meta.close();
}
}
  /**
   * Adds a (single) hbase:meta row for the specified new region and its daughters. Note that this
   * does not add its daughter's as different rows, but adds information about the daughters
   * in the same row as the parent. Use
   * {@link #splitRegion(Connection, RegionInfo, long, RegionInfo, RegionInfo, ServerName, int)}
   * if you want to do that.
   * @param connection connection we're using
   * @param regionInfo region information
   * @throws IOException if problem connecting or updating meta
   */
  @VisibleForTesting
  public static void addRegionToMeta(Connection connection, RegionInfo regionInfo)
      throws IOException {
    // Single region, single replica (regionReplication == 1).
    addRegionsToMeta(connection, Collections.singletonList(regionInfo), 1);
  }
  /**
   * Adds a hbase:meta row for each of the specified new regions. Initial state for new regions
   * is CLOSED.
   * @param connection connection we're using
   * @param regionInfos region information list
   * @param regionReplication number of replicas per region; empty location cells are
   *   written for replica ids 1..regionReplication-1
   * @throws IOException if problem connecting or updating meta
   */
  public static void addRegionsToMeta(Connection connection, List<RegionInfo> regionInfos,
      int regionReplication) throws IOException {
    addRegionsToMeta(connection, regionInfos, regionReplication,
      EnvironmentEdgeManager.currentTime());
  }
  /**
   * Adds a hbase:meta row for each of the specified new regions. Initial state for new regions
   * is CLOSED.
   * @param connection connection we're using
   * @param regionInfos region information list
   * @param regionReplication number of replicas per region
   * @param ts desired timestamp
   * @throws IOException if problem connecting or updating meta
   */
  private static void addRegionsToMeta(Connection connection, List<RegionInfo> regionInfos,
      int regionReplication, long ts) throws IOException {
    List<Put> puts = new ArrayList<>();
    for (RegionInfo regionInfo : regionInfos) {
      // Only default replicas get their own meta rows; replica infos live in the same row.
      if (RegionReplicaUtil.isDefaultReplica(regionInfo)) {
        Put put = makePutFromRegionInfo(regionInfo, ts);
        // New regions are added with initial state of CLOSED.
        addRegionStateToPut(put, RegionState.State.CLOSED);
        // Add empty locations for region replicas so that number of replicas can be cached
        // whenever the primary region is looked up from meta
        for (int i = 1; i < regionReplication; i++) {
          addEmptyLocation(put, i);
        }
        puts.add(put);
      }
    }
    putsToMetaTable(connection, puts);
    LOG.info("Added {} regions to meta.", puts.size());
  }
  /**
   * Merge the two regions into one in an atomic operation. Deletes the two merging regions in
   * hbase:meta and adds the merged region with the information of two merging regions.
   * All mutations are applied atomically via {@link #multiMutate}.
   * @param connection connection we're using
   * @param mergedRegion the merged region
   * @param regionA merge parent region A
   * @param regionAOpenSeqNum the next open sequence id for region A, used by serial replication. -1
   *   if not necessary.
   * @param regionB merge parent region B
   * @param regionBOpenSeqNum the next open sequence id for region B, used by serial replication. -1
   *   if not necessary.
   * @param sn the location of the region
   */
  public static void mergeRegions(Connection connection, RegionInfo mergedRegion,
      RegionInfo regionA, long regionAOpenSeqNum, RegionInfo regionB, long regionBOpenSeqNum,
      ServerName sn, int regionReplication) throws IOException {
    try (Table meta = getMetaHTable(connection)) {
      // One timestamp for all deletes and puts so they stay consistent as a unit.
      long time = EnvironmentEdgeManager.currentTime();
      List<Mutation> mutations = new ArrayList<>();
      List<RegionInfo> replicationParents = new ArrayList<>(2);
      // Deletes for merging regions
      mutations.add(makeDeleteFromRegionInfo(regionA, time));
      if (regionAOpenSeqNum > 0) {
        mutations.add(makePutForReplicationBarrier(regionA, regionAOpenSeqNum, time));
        replicationParents.add(regionA);
      }
      mutations.add(makeDeleteFromRegionInfo(regionB, time));
      if (regionBOpenSeqNum > 0) {
        mutations.add(makePutForReplicationBarrier(regionB, regionBOpenSeqNum, time));
        replicationParents.add(regionB);
      }
      // Put for parent: record both merge parents under MERGEA/MERGEB qualifiers.
      Put putOfMerged = makePutFromRegionInfo(mergedRegion, time);
      putOfMerged.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
          .setRow(putOfMerged.getRow())
          .setFamily(HConstants.CATALOG_FAMILY)
          .setQualifier(HConstants.MERGEA_QUALIFIER)
          .setTimestamp(putOfMerged.getTimestamp())
          .setType(Type.Put)
          .setValue(RegionInfo.toByteArray(regionA))
          .build())
        .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
          .setRow(putOfMerged.getRow())
          .setFamily(HConstants.CATALOG_FAMILY)
          .setQualifier(HConstants.MERGEB_QUALIFIER)
          .setTimestamp(putOfMerged.getTimestamp())
          .setType(Type.Put)
          .setValue(RegionInfo.toByteArray(regionB))
          .build());
      // Set initial state to CLOSED
      // NOTE: If initial state is not set to CLOSED then merged region gets added with the
      // default OFFLINE state. If Master gets restarted after this step, start up sequence of
      // master tries to assign this offline region. This is followed by re-assignments of the
      // merged region from resumed {@link MergeTableRegionsProcedure}
      addRegionStateToPut(putOfMerged, RegionState.State.CLOSED);
      mutations.add(putOfMerged);
      // The merged is a new region, openSeqNum = 1 is fine. ServerName may be null
      // if crash after merge happened but before we got to here.. means in-memory
      // locations of offlined merged, now-closed, regions is lost. Should be ok. We
      // assign the merged region later.
      if (sn != null) {
        addLocation(putOfMerged, sn, 1, mergedRegion.getReplicaId());
      }
      // Add empty locations for region replicas of the merged region so that number of replicas can
      // be cached whenever the primary region is looked up from meta
      for (int i = 1; i < regionReplication; i++) {
        addEmptyLocation(putOfMerged, i);
      }
      // add parent reference for serial replication
      if (!replicationParents.isEmpty()) {
        addReplicationParent(putOfMerged, replicationParents);
      }
      byte[] tableRow = Bytes.toBytes(mergedRegion.getRegionNameAsString() + HConstants.DELIMITER);
      multiMutate(connection, meta, tableRow, mutations);
    }
  }
  /**
   * Splits the region into two in an atomic operation. Offlines the parent region with the
   * information that it is split into two, and also adds the daughter regions. Does not add the
   * location information to the daughter regions since they are not open yet.
   * All three puts are applied atomically via {@link #multiMutate}.
   * @param connection connection we're using
   * @param parent the parent region which is split
   * @param parentOpenSeqNum the next open sequence id for parent region, used by serial
   *   replication. -1 if not necessary.
   * @param splitA Split daughter region A
   * @param splitB Split daughter region B
   * @param sn the location of the region
   */
  public static void splitRegion(Connection connection, RegionInfo parent, long parentOpenSeqNum,
      RegionInfo splitA, RegionInfo splitB, ServerName sn, int regionReplication)
      throws IOException {
    try (Table meta = getMetaHTable(connection)) {
      // Shared timestamp keeps parent and daughter rows consistent as one unit.
      long time = EnvironmentEdgeManager.currentTime();
      // Put for parent: re-written as offline + split, with daughter references attached.
      Put putParent = makePutFromRegionInfo(RegionInfoBuilder.newBuilder(parent)
        .setOffline(true)
        .setSplit(true).build(), time);
      addDaughtersToPut(putParent, splitA, splitB);
      // Puts for daughters
      Put putA = makePutFromRegionInfo(splitA, time);
      Put putB = makePutFromRegionInfo(splitB, time);
      if (parentOpenSeqNum > 0) {
        addReplicationBarrier(putParent, parentOpenSeqNum);
        addReplicationParent(putA, Collections.singletonList(parent));
        addReplicationParent(putB, Collections.singletonList(parent));
      }
      // Set initial state to CLOSED
      // NOTE: If initial state is not set to CLOSED then daughter regions get added with the
      // default OFFLINE state. If Master gets restarted after this step, start up sequence of
      // master tries to assign these offline regions. This is followed by re-assignments of the
      // daughter regions from resumed {@link SplitTableRegionProcedure}
      addRegionStateToPut(putA, RegionState.State.CLOSED);
      addRegionStateToPut(putB, RegionState.State.CLOSED);
      addSequenceNum(putA, 1, splitA.getReplicaId()); // new regions, openSeqNum = 1 is fine.
      addSequenceNum(putB, 1, splitB.getReplicaId());
      // Add empty locations for region replicas of daughters so that number of replicas can be
      // cached whenever the primary region is looked up from meta
      for (int i = 1; i < regionReplication; i++) {
        addEmptyLocation(putA, i);
        addEmptyLocation(putB, i);
      }
      byte[] tableRow = Bytes.toBytes(parent.getRegionNameAsString() + HConstants.DELIMITER);
      multiMutate(connection, meta, tableRow, putParent, putA, putB);
    }
  }
/**
* Update state of the table in meta.
* @param connection what we use for update
* @param state new state
*/
private static void updateTableState(Connection connection, TableState state) throws IOException {
Put put = makePutFromTableState(state, EnvironmentEdgeManager.currentTime());
putToMetaTable(connection, put);
LOG.info("Updated {} in hbase:meta", state);
}
/**
* Construct PUT for given state
* @param state new state
*/
public static Put makePutFromTableState(TableState state, long ts) {
Put put = new Put(state.getTableName().getName(), ts);
put.addColumn(getTableFamily(), getTableStateColumn(), state.convert().toByteArray());
return put;
}
/**
* Remove state for table from meta
* @param connection to use for deletion
* @param table to delete state for
*/
public static void deleteTableState(Connection connection, TableName table)
throws IOException {
long time = EnvironmentEdgeManager.currentTime();
Delete delete = new Delete(table.getName());
delete.addColumns(getTableFamily(), getTableStateColumn(), time);
deleteFromMetaTable(connection, delete);
LOG.info("Deleted table " + table + " state from META");
}
  // Varargs convenience overload; delegates to the List-based atomic multi-mutate.
  private static void multiMutate(Connection connection, Table table, byte[] row,
      Mutation... mutations) throws IOException {
    multiMutate(connection, table, row, Arrays.asList(mutations));
  }
  /**
   * Performs an atomic multi-mutate operation against the given table.
   * Sends all mutations in a single MutateRowsRequest through the
   * MultiRowMutationService coprocessor, keyed on <code>row</code>.
   * Only Put and Delete mutations are supported.
   */
  private static void multiMutate(Connection connection, final Table table, byte[] row,
      final List<Mutation> mutations) throws IOException {
    debugLogMutations(mutations);
    Batch.Call<MultiRowMutationService, MutateRowsResponse> callable =
      new Batch.Call<MultiRowMutationService, MutateRowsResponse>() {
        @Override
        public MutateRowsResponse call(MultiRowMutationService instance) throws IOException {
          // Translate each client Mutation into its protobuf MutationProto form.
          MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();
          for (Mutation mutation : mutations) {
            if (mutation instanceof Put) {
              builder.addMutationRequest(
                ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.PUT, mutation));
            } else if (mutation instanceof Delete) {
              builder.addMutationRequest(
                ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.DELETE, mutation));
            } else {
              throw new DoNotRetryIOException(
                "multi in MetaEditor doesn't support " + mutation.getClass().getName());
            }
          }
          ServerRpcController controller = new ServerRpcController();
          CoprocessorRpcUtils.BlockingRpcCallback<MutateRowsResponse> rpcCallback =
            new CoprocessorRpcUtils.BlockingRpcCallback<>();
          // Blocking call; any server-side failure is surfaced through the controller.
          instance.mutateRows(controller, builder.build(), rpcCallback);
          MutateRowsResponse resp = rpcCallback.get();
          if (controller.failedOnException()) {
            throw controller.getFailedOn();
          }
          return resp;
        }
      };
    try {
      // Same start and end row: target exactly the region hosting `row`.
      table.coprocessorService(MultiRowMutationService.class, row, row, callable);
    } catch (Throwable e) {
      // Re-throw IOExceptions as-is; wrap anything else.
      Throwables.propagateIfPossible(e, IOException.class);
      throw new IOException(e);
    }
  }
  /**
   * Updates the location of the specified region in hbase:meta to be the specified server hostname
   * and startcode.
   * <p>
   * Uses passed catalog tracker to get a connection to the server hosting hbase:meta and makes
   * edits to that region.
   * @param connection connection we're using
   * @param regionInfo region to update location of
   * @param openSeqNum the latest sequence number obtained when the region was open
   * @param sn Server name
   * @param masterSystemTime wall clock time from master if passed in the open region RPC
   */
  @VisibleForTesting
  public static void updateRegionLocation(Connection connection, RegionInfo regionInfo,
      ServerName sn, long openSeqNum, long masterSystemTime) throws IOException {
    updateLocation(connection, regionInfo, sn, openSeqNum, masterSystemTime);
  }
/**
* Updates the location of the specified region to be the specified server.
* <p>
* Connects to the specified server which should be hosting the specified catalog region name to
* perform the edit.
* @param connection connection we're using
* @param regionInfo region to update location of
* @param sn Server name
* @param openSeqNum the latest sequence number obtained when the region was open
* @param masterSystemTime wall clock time from master if passed in the open region RPC
* @throws IOException In particular could throw {@link java.net.ConnectException} if the server
* is down on other end.
*/
private static void updateLocation(Connection connection, RegionInfo regionInfo, ServerName sn,
long openSeqNum, long masterSystemTime) throws IOException {
// region replicas are kept in the primary region's row
Put put = new Put(getMetaKeyForRegion(regionInfo), masterSystemTime);
addRegionInfo(put, regionInfo);
addLocation(put, sn, openSeqNum, regionInfo.getReplicaId());
putToMetaTable(connection, put);
LOG.info("Updated row {} with server=", regionInfo.getRegionNameAsString(), sn);
}
/**
* Deletes the specified region from META.
* @param connection connection we're using
* @param regionInfo region to be deleted from META
* @throws IOException
*/
public static void deleteRegion(Connection connection, RegionInfo regionInfo) throws IOException {
long time = EnvironmentEdgeManager.currentTime();
Delete delete = new Delete(regionInfo.getRegionName());
delete.addFamily(getCatalogFamily(), time);
deleteFromMetaTable(connection, delete);
LOG.info("Deleted " + regionInfo.getRegionNameAsString());
}
  /**
   * Deletes the specified regions from META, using the current time as the delete timestamp.
   * @param connection connection we're using
   * @param regionsInfo list of regions to be deleted from META
   */
  public static void deleteRegions(Connection connection, List<RegionInfo> regionsInfo)
      throws IOException {
    deleteRegions(connection, regionsInfo, EnvironmentEdgeManager.currentTime());
  }
/**
* Deletes the specified regions from META.
* @param connection connection we're using
* @param regionsInfo list of regions to be deleted from META
*/
public static void deleteRegions(Connection connection, List<RegionInfo> regionsInfo, long ts)
throws IOException {
List<Delete> deletes = new ArrayList<>(regionsInfo.size());
for (RegionInfo hri : regionsInfo) {
Delete e = new Delete(hri.getRegionName());
e.addFamily(getCatalogFamily(), ts);
deletes.add(e);
}
deleteFromMetaTable(connection, deletes);
LOG.info("Deleted {} regions from META", regionsInfo.size());
LOG.debug("Deleted regions: {}", regionsInfo);
}
  /**
   * Overwrites the specified regions from hbase:meta. Deletes old rows for the given regions and
   * adds new ones. Regions added back have state CLOSED.
   * @param connection connection we're using
   * @param regionInfos list of regions to be added to META
   * @param regionReplication number of replicas per region
   */
  public static void overwriteRegions(Connection connection,
      List<RegionInfo> regionInfos, int regionReplication) throws IOException {
    // use master time for delete marker and the Put
    long now = EnvironmentEdgeManager.currentTime();
    deleteRegions(connection, regionInfos, now);
    // Why sleep? This is the easiest way to ensure that the previous deletes does not
    // eclipse the following puts, that might happen in the same ts from the server.
    // See HBASE-9906, and HBASE-9879. Once either HBASE-9879, HBASE-8770 is fixed,
    // or HBASE-9905 is fixed and meta uses seqIds, we do not need the sleep.
    //
    // HBASE-13875 uses master timestamp for the mutations. The 20ms sleep is not needed;
    // instead the puts use now + 1 so they strictly follow the delete markers.
    addRegionsToMeta(connection, regionInfos, regionReplication, now + 1);
    LOG.info("Overwritten " + regionInfos.size() + " regions to Meta");
    LOG.debug("Overwritten regions: {} ", regionInfos);
  }
/**
 * Deletes merge qualifiers for the specified merged region.
 * @param connection connection we're using
 * @param mergedRegion the merged region
 * @throws IOException if the delete against the meta table fails
 */
public static void deleteMergeQualifiers(Connection connection, final RegionInfo mergedRegion)
  throws IOException {
  long time = EnvironmentEdgeManager.currentTime();
  Delete delete = new Delete(mergedRegion.getRegionName());
  // Remove both merge-parent references (region A and region B).
  delete.addColumns(getCatalogFamily(), HConstants.MERGEA_QUALIFIER, time);
  delete.addColumns(getCatalogFamily(), HConstants.MERGEB_QUALIFIER, time);
  deleteFromMetaTable(connection, delete);
  // Parameterized logging for consistency with the other meta mutators.
  LOG.info("Deleted references in merged region {}, qualifier={} and qualifier={}",
    mergedRegion.getRegionNameAsString(),
    Bytes.toStringBinary(HConstants.MERGEA_QUALIFIER),
    Bytes.toStringBinary(HConstants.MERGEB_QUALIFIER));
}
/**
 * Adds the serialized {@link RegionInfo} to the given Put as the region-info
 * cell, reusing the Put's own row and timestamp.
 * @param p Put to add the cell to
 * @param hri region whose serialized form is stored as the cell value
 * @return the same Put, for call chaining
 * @throws IOException if the cell cannot be added
 */
public static Put addRegionInfo(final Put p, final RegionInfo hri)
  throws IOException {
  p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
    .setRow(p.getRow())
    .setFamily(getCatalogFamily())
    .setQualifier(HConstants.REGIONINFO_QUALIFIER)
    .setTimestamp(p.getTimestamp())
    .setType(Type.Put)
    .setValue(RegionInfo.toByteArray(hri))
    .build());
  return p;
}
/**
 * Adds the three location cells for a region replica to the given Put:
 * server address, server start code, and open sequence number, all keyed by
 * the replica-specific column qualifiers and the Put's own timestamp.
 * @param p Put to add the cells to
 * @param sn server the replica is located on
 * @param openSeqNum sequence number at region open
 * @param replicaId replica whose columns are written
 * @return the same Put, for call chaining
 * @throws IOException if a cell cannot be added
 */
public static Put addLocation(Put p, ServerName sn, long openSeqNum, int replicaId)
  throws IOException {
  // One builder, cleared between cells, so all three cells share SHALLOW_COPY semantics.
  CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
  return p.add(builder.clear()
    .setRow(p.getRow())
    .setFamily(getCatalogFamily())
    .setQualifier(getServerColumn(replicaId))
    .setTimestamp(p.getTimestamp())
    .setType(Cell.Type.Put)
    .setValue(Bytes.toBytes(sn.getAddress().toString()))
    .build())
    .add(builder.clear()
    .setRow(p.getRow())
    .setFamily(getCatalogFamily())
    .setQualifier(getStartCodeColumn(replicaId))
    .setTimestamp(p.getTimestamp())
    .setType(Cell.Type.Put)
    .setValue(Bytes.toBytes(sn.getStartcode()))
    .build())
    .add(builder.clear()
    .setRow(p.getRow())
    .setFamily(getCatalogFamily())
    .setQualifier(getSeqNumColumn(replicaId))
    .setTimestamp(p.getTimestamp())
    // NOTE(review): 'Type.Put' here vs 'Cell.Type.Put' above — presumably the
    // same enum via import; confirm and unify.
    .setType(Type.Put)
    .setValue(Bytes.toBytes(openSeqNum))
    .build());
}
/**
 * Writes a raw region name to the stream, doubling every ESCAPE_BYTE so the
 * encoded form can later be split unambiguously on the
 * ESCAPE_BYTE+SEPARATED_BYTE delimiter (see getParentsBytes/parseParentsBytes).
 */
private static void writeRegionName(ByteArrayOutputStream out, byte[] regionName) {
  for (byte b : regionName) {
    if (b == ESCAPE_BYTE) {
      // Escape by doubling the escape byte itself.
      out.write(ESCAPE_BYTE);
    }
    out.write(b);
  }
}
/**
 * Serializes the region names of the given parents into one byte array,
 * joining entries with the ESCAPE_BYTE+SEPARATED_BYTE delimiter; names are
 * escaped by {@code writeRegionName} so the delimiter cannot collide with data.
 * NOTE(review): assumes {@code parents} is non-empty — the unguarded
 * {@code iter.next()} throws NoSuchElementException on an empty list; confirm
 * all callers guarantee this.
 */
@VisibleForTesting
public static byte[] getParentsBytes(List<RegionInfo> parents) {
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  Iterator<RegionInfo> iter = parents.iterator();
  writeRegionName(bos, iter.next().getRegionName());
  while (iter.hasNext()) {
    // Delimiter between successive region names.
    bos.write(ESCAPE_BYTE);
    bos.write(SEPARATED_BYTE);
    writeRegionName(bos, iter.next().getRegionName());
  }
  return bos.toByteArray();
}
/**
 * Inverse of {@code getParentsBytes}: splits the encoded bytes back into the
 * individual raw region names. An ESCAPE_BYTE followed by SEPARATED_BYTE ends
 * an entry; an ESCAPE_BYTE followed by anything else is an escaped literal and
 * the following byte is kept verbatim.
 * NOTE(review): a trailing lone ESCAPE_BYTE would make {@code bytes[i]} read
 * past the array — valid encodings never end that way, but confirm input is
 * always produced by getParentsBytes.
 */
private static List<byte[]> parseParentsBytes(byte[] bytes) {
  List<byte[]> parents = new ArrayList<>();
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  for (int i = 0; i < bytes.length; i++) {
    if (bytes[i] == ESCAPE_BYTE) {
      // Consume the escape and inspect the next byte.
      i++;
      if (bytes[i] == SEPARATED_BYTE) {
        // Entry delimiter: flush the accumulated name.
        parents.add(bos.toByteArray());
        bos.reset();
        continue;
      }
      // fall through to append the byte
    }
    bos.write(bytes[i]);
  }
  if (bos.size() > 0) {
    // Last entry has no trailing delimiter.
    parents.add(bos.toByteArray());
  }
  return parents;
}
/**
 * Adds the serialized parent-region list (see {@code getParentsBytes}) to the
 * Put as the replication-parent cell in the replication barrier family.
 * @throws IOException if the cell cannot be added
 */
private static void addReplicationParent(Put put, List<RegionInfo> parents) throws IOException {
  byte[] value = getParentsBytes(parents);
  put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(put.getRow())
    .setFamily(HConstants.REPLICATION_BARRIER_FAMILY).setQualifier(REPLICATION_PARENT_QUALIFIER)
    .setTimestamp(put.getTimestamp()).setType(Type.Put).setValue(value).build());
}
/**
 * Builds a Put, timestamped with {@code ts}, that records {@code openSeqNum}
 * as a replication barrier for the given region.
 * @throws IOException if the barrier cell cannot be added
 */
public static Put makePutForReplicationBarrier(RegionInfo regionInfo, long openSeqNum, long ts)
  throws IOException {
  Put put = new Put(regionInfo.getRegionName(), ts);
  addReplicationBarrier(put, openSeqNum);
  return put;
}
/**
 * Adds {@code openSeqNum} to the Put as a sequence-number cell in the
 * replication barrier family, reusing the Put's row and timestamp.
 * @throws IOException if the cell cannot be added
 */
public static void addReplicationBarrier(Put put, long openSeqNum) throws IOException {
  put.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
    .setRow(put.getRow())
    .setFamily(HConstants.REPLICATION_BARRIER_FAMILY)
    .setQualifier(HConstants.SEQNUM_QUALIFIER)
    .setTimestamp(put.getTimestamp())
    .setType(Type.Put)
    .setValue(Bytes.toBytes(openSeqNum))
    .build());
}
/**
 * Adds value-less server, start-code and seqnum cells for the given replica,
 * effectively clearing its location while keeping the columns present at the
 * Put's timestamp.
 * @return the same Put, for call chaining
 * @throws IOException if a cell cannot be added
 */
private static Put addEmptyLocation(Put p, int replicaId) throws IOException {
  CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
  return p.add(builder.clear()
    .setRow(p.getRow())
    .setFamily(getCatalogFamily())
    .setQualifier(getServerColumn(replicaId))
    .setTimestamp(p.getTimestamp())
    .setType(Type.Put)
    .build())
    .add(builder.clear()
    .setRow(p.getRow())
    .setFamily(getCatalogFamily())
    .setQualifier(getStartCodeColumn(replicaId))
    .setTimestamp(p.getTimestamp())
    .setType(Cell.Type.Put)
    .build())
    .add(builder.clear()
    .setRow(p.getRow())
    .setFamily(getCatalogFamily())
    .setQualifier(getSeqNumColumn(replicaId))
    .setTimestamp(p.getTimestamp())
    .setType(Cell.Type.Put)
    .build());
}
/**
 * Immutable view of a region's replication barrier info as read from meta:
 * the sorted barrier sequence numbers, the region state (may be null if the
 * state column was absent), and the raw names of any parent regions.
 */
public static final class ReplicationBarrierResult {
  // Sorted, de-duplicated barrier sequence numbers.
  private final long[] barriers;
  // Region state parsed from meta; null when no state column was found.
  private final RegionState.State state;
  // Raw parent region names; empty when the region has no recorded parents.
  private final List<byte[]> parentRegionNames;

  public ReplicationBarrierResult(long[] barriers, State state, List<byte[]> parentRegionNames) {
    this.barriers = barriers;
    this.state = state;
    this.parentRegionNames = parentRegionNames;
  }

  public long[] getBarriers() {
    return barriers;
  }

  public RegionState.State getState() {
    return state;
  }

  public List<byte[]> getParentRegionNames() {
    return parentRegionNames;
  }

  @Override
  public String toString() {
    return "ReplicationBarrierResult [barriers=" + Arrays.toString(barriers) + ", state=" +
      state + ", parentRegionNames=" +
      parentRegionNames.stream().map(Bytes::toStringBinary).collect(Collectors.joining(", ")) +
      "]";
  }
}
/** Decodes a barrier cell's value as a long sequence number. */
private static long getReplicationBarrier(Cell c) {
  return Bytes.toLong(c.getValueArray(), c.getValueOffset(), c.getValueLength());
}
/**
 * Extracts all replication barrier sequence numbers from the result's
 * barrier-family seqnum cells, returned sorted and de-duplicated.
 */
public static long[] getReplicationBarriers(Result result) {
  return result.getColumnCells(HConstants.REPLICATION_BARRIER_FAMILY, HConstants.SEQNUM_QUALIFIER)
    .stream().mapToLong(MetaTableAccessor::getReplicationBarrier).sorted().distinct().toArray();
}
/**
 * Assembles a {@link ReplicationBarrierResult} from a single meta row:
 * barrier seqnums, the (optional) region state column, and the (optional)
 * serialized parent-region list.
 */
private static ReplicationBarrierResult getReplicationBarrierResult(Result result) {
  long[] barriers = getReplicationBarriers(result);
  byte[] stateBytes = result.getValue(getCatalogFamily(), getRegionStateColumn());
  // State column may be absent; propagate null rather than failing.
  RegionState.State state =
    stateBytes != null ? RegionState.State.valueOf(Bytes.toString(stateBytes)) : null;
  byte[] parentRegionsBytes =
    result.getValue(HConstants.REPLICATION_BARRIER_FAMILY, REPLICATION_PARENT_QUALIFIER);
  List<byte[]> parentRegionNames =
    parentRegionsBytes != null ? parseParentsBytes(parentRegionsBytes) : Collections.emptyList();
  return new ReplicationBarrierResult(barriers, state, parentRegionNames);
}
/**
 * Scans meta (in reverse, from the row's region backwards) for the replication
 * barrier info of the region with the given encoded name. Returns an empty
 * result (no barriers, null state, no parents) if the scan exhausts meta
 * without finding a matching row.
 * @throws IOException if the meta scan fails
 */
public static ReplicationBarrierResult getReplicationBarrierResult(Connection conn,
    TableName tableName, byte[] row, byte[] encodedRegionName) throws IOException {
  byte[] metaStartKey = RegionInfo.createRegionName(tableName, row, HConstants.NINES, false);
  byte[] metaStopKey =
    RegionInfo.createRegionName(tableName, HConstants.EMPTY_START_ROW, "", false);
  // Reverse scan so we encounter the region containing 'row' first.
  Scan scan = new Scan().withStartRow(metaStartKey).withStopRow(metaStopKey)
    .addColumn(getCatalogFamily(), getRegionStateColumn())
    .addFamily(HConstants.REPLICATION_BARRIER_FAMILY).readAllVersions().setReversed(true)
    .setCaching(10);
  try (Table table = getMetaHTable(conn); ResultScanner scanner = table.getScanner(scan)) {
    for (Result result;;) {
      result = scanner.next();
      if (result == null) {
        // Scanned past the table without a match: report an empty result.
        return new ReplicationBarrierResult(new long[0], null, Collections.emptyList());
      }
      byte[] regionName = result.getRow();
      // TODO: we may look up a region which has already been split or merged so we need to check
      // whether the encoded name matches. Need to find a way to quit earlier when there is no
      // record for the given region, for now it will scan to the end of the table.
      if (!Bytes.equals(encodedRegionName,
        Bytes.toBytes(RegionInfo.encodeRegionName(regionName)))) {
        continue;
      }
      return getReplicationBarrierResult(result);
    }
  }
}
/**
 * Fetches all replication barrier sequence numbers for the given region by a
 * point Get on its meta row (all versions of the barrier seqnum column).
 * @throws IOException if the Get against meta fails
 */
public static long[] getReplicationBarrier(Connection conn, byte[] regionName)
  throws IOException {
  try (Table table = getMetaHTable(conn)) {
    Result result = table.get(new Get(regionName)
      .addColumn(HConstants.REPLICATION_BARRIER_FAMILY, HConstants.SEQNUM_QUALIFIER)
      .readAllVersions());
    return getReplicationBarriers(result);
  }
}
/**
 * Scans the replication columns of all the table's meta rows and returns, for
 * every region that has a barrier, a pair of (encoded region name, latest
 * barrier seqnum). Regions without a barrier column are skipped.
 * @throws IOException if the meta scan fails
 */
public static List<Pair<String, Long>> getTableEncodedRegionNameAndLastBarrier(Connection conn,
    TableName tableName) throws IOException {
  List<Pair<String, Long>> list = new ArrayList<>();
  scanMeta(conn, getTableStartRowForMeta(tableName, QueryType.REPLICATION),
    getTableStopRowForMeta(tableName, QueryType.REPLICATION), QueryType.REPLICATION, r -> {
      byte[] value =
        r.getValue(HConstants.REPLICATION_BARRIER_FAMILY, HConstants.SEQNUM_QUALIFIER);
      if (value == null) {
        // No barrier recorded for this region; keep scanning.
        return true;
      }
      long lastBarrier = Bytes.toLong(value);
      String encodedRegionName = RegionInfo.encodeRegionName(r.getRow());
      list.add(Pair.newPair(encodedRegionName, lastBarrier));
      return true;
    });
  return list;
}
/**
 * Returns the encoded names of all the table's regions that have replication
 * rows in meta. Uses a FirstKeyOnlyFilter since only row keys are needed.
 * @throws IOException if the meta scan fails
 */
public static List<String> getTableEncodedRegionNamesForSerialReplication(Connection conn,
    TableName tableName) throws IOException {
  List<String> list = new ArrayList<>();
  scanMeta(conn, getTableStartRowForMeta(tableName, QueryType.REPLICATION),
    getTableStopRowForMeta(tableName, QueryType.REPLICATION), QueryType.REPLICATION,
    new FirstKeyOnlyFilter(), Integer.MAX_VALUE, r -> {
      list.add(RegionInfo.encodeRegionName(r.getRow()));
      return true;
    });
  return list;
}
/**
 * Debug-logs each mutation on its own line; no-op when the meta log's DEBUG
 * level is disabled.
 * @throws IOException if serializing a mutation to JSON fails
 */
private static void debugLogMutations(List<? extends Mutation> mutations) throws IOException {
  if (!METALOG.isDebugEnabled()) {
    return;
  }
  // Logging each mutation in separate line makes it easier to see diff between them visually
  // because of common starting indentation.
  for (Mutation mutation : mutations) {
    debugLogMutation(mutation);
  }
}
/** Debug-logs a single mutation as its class name plus JSON form. */
private static void debugLogMutation(Mutation p) throws IOException {
  METALOG.debug("{} {}", p.getClass().getSimpleName(), p.toJSON());
}
/**
 * Adds the open sequence number for the given replica to the Put as a
 * catalog-family cell at the Put's timestamp.
 * @return the same Put, for call chaining
 * @throws IOException if the cell cannot be added
 */
private static Put addSequenceNum(Put p, long openSeqNum, int replicaId) throws IOException {
  return p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
    .setRow(p.getRow())
    .setFamily(HConstants.CATALOG_FAMILY)
    .setQualifier(getSeqNumColumn(replicaId))
    .setTimestamp(p.getTimestamp())
    .setType(Type.Put)
    .setValue(Bytes.toBytes(openSeqNum))
    .build());
}
}
| {
"content_hash": "35366a06b11caf2b00ce5ea36081db4b",
"timestamp": "",
"source": "github",
"line_count": 2162,
"max_line_length": 100,
"avg_line_length": 39.30157261794635,
"alnum_prop": 0.6855360715546663,
"repo_name": "Eshcar/hbase",
"id": "22256df3a061772a68eb645ee18c02c638c4d5f2",
"size": "85776",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "25330"
},
{
"name": "C",
"bytes": "28534"
},
{
"name": "C++",
"bytes": "56085"
},
{
"name": "CMake",
"bytes": "13186"
},
{
"name": "CSS",
"bytes": "37063"
},
{
"name": "Dockerfile",
"bytes": "4842"
},
{
"name": "Groovy",
"bytes": "37699"
},
{
"name": "HTML",
"bytes": "17275"
},
{
"name": "Java",
"bytes": "35340173"
},
{
"name": "JavaScript",
"bytes": "2694"
},
{
"name": "Makefile",
"bytes": "1359"
},
{
"name": "PHP",
"bytes": "8385"
},
{
"name": "Perl",
"bytes": "383739"
},
{
"name": "Python",
"bytes": "90226"
},
{
"name": "Ruby",
"bytes": "664567"
},
{
"name": "Shell",
"bytes": "254787"
},
{
"name": "Thrift",
"bytes": "52354"
},
{
"name": "XSLT",
"bytes": "6764"
}
],
"symlink_target": ""
} |
using System;
using System.Drawing;
using MonoTouch.UIKit;
namespace NuGetSearch.IOS
{
/// <summary>
/// This code was adapted from the example at http://docs.xamarin.com/recipes/ios/standard_controls/popovers/display_a_loading_message/
/// </summary>
public class LoadingOverlay : UIView
{
    private UIActivityIndicatorView spinner;
    private UILabel caption;

    /// <summary>
    /// Initializes a new instance of the <see cref="NuGetSearch.IOS.LoadingOverlay"/> class.
    /// </summary>
    /// <param name="frame">Frame.</param>
    /// <param name="text">Text.</param>
    public LoadingOverlay(RectangleF frame, string text) : base(frame)
    {
        // Semi-transparent black backdrop that resizes with its parent view.
        this.BackgroundColor = UIColor.Black;
        this.Alpha = 0.75f;
        this.AutoresizingMask = UIViewAutoresizing.FlexibleDimensions;

        float captionHeight = 22;
        float captionWidth = Frame.Width - 20;

        // Midpoint of the overlay; both subviews are placed relative to it.
        float midX = Frame.Width / 2;
        float midY = Frame.Height / 2;

        // Activity spinner: centered horizontally, offset above the midpoint.
        this.spinner = new UIActivityIndicatorView(UIActivityIndicatorViewStyle.WhiteLarge);
        float spinnerWidth = this.spinner.Frame.Width;
        float spinnerHeight = this.spinner.Frame.Height;
        this.spinner.Frame = new RectangleF(
            midX - (spinnerWidth / 2),
            midY - spinnerHeight - 20,
            spinnerWidth,
            spinnerHeight);
        this.spinner.AutoresizingMask = UIViewAutoresizing.FlexibleMargins;
        this.AddSubview(this.spinner);
        this.spinner.StartAnimating();

        // Caption label, centered just below the midpoint.
        this.caption = new UILabel(new RectangleF(
            midX - (captionWidth / 2),
            midY + 20,
            captionWidth,
            captionHeight));
        this.caption.BackgroundColor = UIColor.Clear;
        this.caption.TextColor = UIColor.White;
        this.caption.Text = text;
        this.caption.TextAlignment = UITextAlignment.Center;
        this.caption.AutoresizingMask = UIViewAutoresizing.FlexibleMargins;
        this.AddSubview(this.caption);
    }

    /// <summary>
    /// Fades out the control and then removes it from the super view
    /// </summary>
    public void Hide()
    {
        UIView.Animate(
            0.5, // fade duration in seconds
            () => { Alpha = 0; },
            () => { RemoveFromSuperview(); });
    }
}
} | {
"content_hash": "f6a3a935ee4dbde6d85035020e5363ad",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 139,
"avg_line_length": 36.65714285714286,
"alnum_prop": 0.6480904130943103,
"repo_name": "brianpursley/nuget-search-mobile",
"id": "ce38c72474cd9a5f063bdd5bc26c40106ec32a40",
"size": "2568",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/NuGetSearch.iOS/LoadingOverlay.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "153093"
}
],
"symlink_target": ""
} |
package thisis.a.test
/*
* Copyright 2013-2018 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0.
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied.
*
* See the License for the specific language governing permissions
* and limitations under the License.
*/
import org.junit.Assert._
import org.junit.Test
class ATest {
/** Trivially passing assertion (1 == 1). */
@Test
def testMe = assertEquals(1,1)
/** Intentionally failing assertion (3 != 1). NOTE(review): this file lives
 *  under a testdata directory, presumably to exercise the build logger's
 *  failure reporting — do not "fix" the assertion; confirm with maintainers. */
@Test
def testMeToo = assertEquals("comparing unequal ints is WRONG",3,1)
} | {
"content_hash": "2d706c0d318c9bfcb41aa85025bf14a9",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 69,
"avg_line_length": 25.5,
"alnum_prop": 0.7291666666666666,
"repo_name": "JetBrains/sbt-tc-logger",
"id": "2104a8e4d1390fca2a1bd6d2aee4dc0937530b34",
"size": "1420",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/testdata/1.0/testsupport/junit/src/test/scala/thisis/a/test/ATest.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "20619"
},
{
"name": "Scala",
"bytes": "28751"
}
],
"symlink_target": ""
} |
<?php
namespace app\models;
use Yii;
use \app\models\base\TaxSettings as BaseTaxSettings;
/**
 * This is the model class for table "tax_settings".
 */
class TaxSettings extends BaseTaxSettings
{
    /**
     * Validation rules for this model, layered on top of the rules inherited
     * from the generated base class.
     *
     * @inheritdoc
     */
    public function rules()
    {
        // Constraints specific to this subclass; merged recursively so they
        // override matching entries from the base model.
        $ownRules = [
            [['tax_key', 'tax_percent', 'status'], 'required'],
            [['tax_percent'], 'number'],
            [['status'], 'integer'],
            [['tax_key'], 'string', 'max' => 100],
            [['tax_key'], 'unique'],
        ];

        return array_replace_recursive(parent::rules(), $ownRules);
    }
}
| {
"content_hash": "e9076198ab55b0fd828c72c160da6474",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 63,
"avg_line_length": 20.75,
"alnum_prop": 0.5215146299483648,
"repo_name": "technodweep/gst-invoice",
"id": "cadbbf6348ceffe77ff4aab16a76c5a80e363f3f",
"size": "581",
"binary": false,
"copies": "1",
"ref": "refs/heads/github",
"path": "models/TaxSettings.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1030"
},
{
"name": "CSS",
"bytes": "1842"
},
{
"name": "JavaScript",
"bytes": "5969"
},
{
"name": "PHP",
"bytes": "210712"
}
],
"symlink_target": ""
} |
class EditorMailer < ActionMailer::Base
  default :from => "copypasta <[email protected]>"
  layout 'editor_email'
  helper :edits

  # Notifies an editor that a new edit was submitted for one of their pages.
  # The reply-to address encodes the edit id and key so editor replies can be
  # routed back through +receive+.
  def new_edit_notice(edit, editor)
    @edit = edit
    @editor = editor
    from = "copypasta <copypasta+edit-#{edit.id}-#{edit.key}@credibl.es>"
    mail(:to => editor.email, :from => from, :subject => "Corrections for #{edit.page.url}", :bcc => '[email protected]')
  end

  # Relays a message about an edit to an editor. The reply address encodes
  # only the edit id (no key), so replies are treated as user responses.
  def edit_message(edit, editor, options = {})
    @options = options
    @edit = edit
    name = options[:from_name]
    name = 'copypasta' if name.blank?
    from = "#{name} <copypasta+edit-#{edit.id}-#{edit.key}@credibl.es>"
    mail(:to => editor.email, :from => from, :subject => "Re: Corrections for #{edit.page.url}", :bcc => '[email protected]')
  end

  # Handles inbound mail. Address without a key => user response, forwarded to
  # all editors of the page's account. Address with a matching key => editor
  # response, which may update the edit's status and notify the original user.
  # Returns true when the mail was processed, false/nil otherwise.
  def receive(email)
    addr = ReceivedEmail.parse_address(email.to.join(","))
    return unless addr && addr[:id]
    e = Edit.where(:id => addr[:id]).first
    # Early-out means e is guaranteed non-nil below; the original code carried
    # unreachable e.nil?/e-presence re-checks in the editor-response branch,
    # which are removed here.
    return if e.nil?
    options = {}
    options[:from_name] = email[:from].display_names.join(",")
    # Prefer the plain-text part of a multipart message.
    body = (email.text_part && email.text_part.body.to_s) || email.body.to_s
    if addr[:key].blank? #user response
      options[:message] = body
      e.page.account.editors.each do |editor|
        Rails.logger.info("EditorMailer: Sending user response on #{e.id} to #{editor.email}")
        EditorMailer.edit_message(e, editor, options).deliver
      end
      true
    else #editor response
      ins = ReceivedEmail.parse_body(body, addr[:key])
      if addr[:key] == e.key
        options[:message] = ins[:message]
        unless ins[:status].blank? || ins[:status] == e.status
          options[:old_status] = e.status
          e.status = ins[:status]
          Rails.logger.info "EditorMailer: Updating status on edit #{e.id}: #{ins[:status]}"
          e.save!
        end
        unless e.email.blank?
          Rails.logger.info("EditorMailer: Sending editor response on #{e.id} to #{e.email}")
          UserMailer.edit_status_change_notice(e, options).deliver
        end
        true
      else
        Rails.logger.info "EditorMailer: Key for #{e.id} didn't match: #{addr[:key]}"
        false
      end
    end
  end
end
| {
"content_hash": "040cca995907466a506818dbb647c339",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 123,
"avg_line_length": 34.73134328358209,
"alnum_prop": 0.6055006446067899,
"repo_name": "mrkurt/copypasta",
"id": "ac8e4575ac76573ac933bcf6e651825ef5158ee8",
"size": "2327",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/mailers/editor_mailer.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CoffeeScript",
"bytes": "14017"
},
{
"name": "JavaScript",
"bytes": "197032"
},
{
"name": "Ruby",
"bytes": "53904"
}
],
"symlink_target": ""
} |
module Octokit
  class Client
    # Methods for the Releases API
    #
    # @see https://developer.github.com/v3/repos/releases/
    module Releases
      # List releases for a repository
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @return [Array<Sawyer::Resource>] A list of releases
      # @see https://developer.github.com/v3/repos/releases/#list-releases-for-a-repository
      def releases(repo, options = {})
        paginate "#{Repository.path repo}/releases", options
      end
      alias :list_releases :releases

      # Create a release
      #
      # @param repo [Integer, String, Repository, Hash] A GitHub repository
      # @param tag_name [String] Git tag from which to create release
      # @option options [String] :target_commitish Specifies the commitish value that determines where the Git tag is created from.
      # @option options [String] :name Name for the release
      # @option options [String] :body Content for release notes
      # @option options [Boolean] :draft Mark this release as a draft
      # @option options [Boolean] :prerelease Mark this release as a pre-release
      # @return [Sawyer::Resource] The release
      # @see https://developer.github.com/v3/repos/releases/#create-a-release
      def create_release(repo, tag_name, options = {})
        opts = options.merge(:tag_name => tag_name)
        post "#{Repository.path repo}/releases", opts
      end

      # Get a release
      #
      # @param url [String] URL for the release as returned from .releases
      # @return [Sawyer::Resource] The release
      # @see https://developer.github.com/v3/repos/releases/#get-a-single-release
      def release(url, options = {})
        get url, options
      end

      # Update a release
      #
      # @param url [String] URL for the release as returned from .releases
      # @option options [String] :target_commitish Specifies the commitish value that determines where the Git tag is created from.
      # @option options [String] :name Name for the release
      # @option options [String] :body Content for release notes
      # @option options [Boolean] :draft Mark this release as a draft
      # @option options [Boolean] :prerelease Mark this release as a pre-release
      # @return [Sawyer::Resource] The release
      # @see https://developer.github.com/v3/repos/releases/#edit-a-release
      def update_release(url, options = {})
        patch url, options
      end
      alias :edit_release :update_release

      # Delete a release
      #
      # @param url [String] URL for the release as returned from .releases
      # @return [Boolean] Success or failure
      # @see https://developer.github.com/v3/repos/releases/#delete-a-release
      def delete_release(url, options = {})
        boolean_from_response(:delete, url, options)
      end

      # List release assets
      #
      # @param release_url [String] URL for the release as returned from .releases
      # @return [Array<Sawyer::Resource>] A list of release assets
      # @see https://developer.github.com/v3/repos/releases/#list-assets-for-a-release
      def release_assets(release_url, options = {})
        # Fetches the release first to follow its hypermedia assets link.
        paginate release(release_url).rels[:assets].href, options
      end

      # Upload a release asset
      #
      # @param release_url [String] URL for the release as returned from .releases
      # @param path_or_file [String] Path to file to upload
      # @option options [String] :content_type The MIME type for the file to upload
      # @option options [String] :name The name for the file
      # @return [Sawyer::Resource] The release asset
      # @see https://developer.github.com/v3/repos/releases/#upload-a-release-asset
      def upload_asset(release_url, path_or_file, options = {})
        # Accepts either a path or anything IO-like that responds to #read.
        file = path_or_file.respond_to?(:read) ? path_or_file : File.new(path_or_file, "r+b")
        options[:content_type] ||= content_type_from_file(file)
        raise Octokit::MissingContentType.new if options[:content_type].nil?
        unless name = options[:name]
          name = File.basename(file.path)
        end
        # Expand the hypermedia upload URL template with the asset name.
        upload_url = release(release_url).rels[:upload].href_template.expand(:name => name)
        request :post, upload_url, file.read, parse_query_and_convenience_headers(options)
      ensure
        # NOTE(review): this also closes an IO object passed in by the caller,
        # not only files opened here — confirm callers expect that.
        file.close if file
      end

      # Get a single release asset
      #
      #
      # @param asset_url [String] URL for the asset as returned from .release_assets
      # @return [Sawyer::Resource] The release asset
      # @see https://developer.github.com/v3/repos/releases/#get-a-single-release-asset
      def release_asset(asset_url, options = {})
        get(asset_url, options)
      end

      # Update a release asset
      #
      # @param asset_url [String] URL for the asset as returned from .release_assets
      # @option options [String] :name The name for the file
      # @option options [String] :label The download text for the file
      # @return [Sawyer::Resource] The release asset
      # @see https://developer.github.com/v3/repos/releases/#edit-a-release-asset
      def update_release_asset(asset_url, options = {})
        patch(asset_url, options)
      end
      alias :edit_release_asset :update_release_asset

      # Delete a release asset
      #
      # @param asset_url [String] URL for the asset as returned from .release_assets
      # @return [Boolean] Success or failure
      # @see https://developer.github.com/v3/repos/releases/#delete-a-release-asset
      def delete_release_asset(asset_url, options = {})
        boolean_from_response(:delete, asset_url, options)
      end

      private

      # Guesses a MIME type from the file's extension via the optional
      # mime-types gem; raises MissingContentType if the gem is unavailable.
      def content_type_from_file(file)
        require 'mime/types'
        if mime_type = MIME::Types.type_for(file.path).first
          mime_type.content_type
        end
      rescue LoadError
        msg = "Please pass content_type or install mime-types gem to guess content type from file"
        raise Octokit::MissingContentType.new(msg)
      end
    end
  end
end
| {
"content_hash": "1979af48b04eb150f013cd6ea2722c19",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 131,
"avg_line_length": 41.979166666666664,
"alnum_prop": 0.6516129032258065,
"repo_name": "r7kamura/octokit.rb",
"id": "289ea30f925b3e479d4e46bb09d8c975fc889c42",
"size": "6045",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/octokit/client/releases.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "38"
},
{
"name": "Ruby",
"bytes": "422502"
},
{
"name": "Shell",
"bytes": "1186"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "927c2a18637a1732d55aec570c326b2d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "87e5bf544acfe291554f094e7e944c2754f89b7d",
"size": "197",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Koanophyllon dolphinii/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
A small library for presenters in a Rails application.
## Installation
Add this line to your application's Gemfile:
gem 'presento'
And then execute:
$ bundle
Or install it yourself as:
$ gem install presento
## Usage
Specify presento in your Gemfile and then you can create your presenters as
````ruby
class MyPresenter < Presento::Base
def awesome_method
42
end
end
# Usage:
MyPresenter.present(my_object) do |my_object|
# in this context my_object will be presented with an instance of MyPresenter
my_object.awesome_method
end
# It works with enumerables too
MyPresenter.present_each(my_objects) do |my_object|
# in this context my_object will be presented with an instance of MyPresenter
my_object.awesome_method
end
````
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
| {
"content_hash": "121d0763a1f3ae26cd51e2a92580ce3b",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 79,
"avg_line_length": 19.745098039215687,
"alnum_prop": 0.733862959285005,
"repo_name": "kwando/presento",
"id": "0221ab529c33f2d490afe70c3b8f247f35727f80",
"size": "1019",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "1719"
}
],
"symlink_target": ""
} |
package com.navercorp.pinpoint.plugin.thrift.interceptor.server;
import com.navercorp.pinpoint.bootstrap.interceptor.scope.InterceptorScope;
import com.navercorp.pinpoint.common.util.ArrayUtils;
import com.navercorp.pinpoint.plugin.thrift.ThriftClientCallContextAttachmentFactory;
import com.navercorp.pinpoint.plugin.thrift.field.getter.TProtocolFieldGetter;
import org.apache.thrift.ProcessFunction;
import org.apache.thrift.protocol.TProtocol;
import com.navercorp.pinpoint.bootstrap.interceptor.AroundInterceptor;
import com.navercorp.pinpoint.bootstrap.interceptor.scope.InterceptorScopeInvocation;
import com.navercorp.pinpoint.bootstrap.logging.PLogger;
import com.navercorp.pinpoint.bootstrap.logging.PLoggerFactory;
import com.navercorp.pinpoint.plugin.thrift.ThriftClientCallContext;
import com.navercorp.pinpoint.plugin.thrift.ThriftConstants;
import com.navercorp.pinpoint.plugin.thrift.field.accessor.ServerMarkerFlagFieldAccessor;
/**
 * This interceptor marks the starting point for tracing {@link org.apache.thrift.ProcessFunction ProcessFunction} and creates the client call context to share
 * with other interceptors within the current scope.
 * <p>
 * <tt>TBaseProcessorProcessInterceptor</tt> -> <b><tt>ProcessFunctionProcessInterceptor</tt></b> -> <tt>TProtocolReadFieldBeginInterceptor</tt> <->
 * <tt>TProtocolReadTTypeInterceptor</tt> -> <tt>TProtocolReadMessageEndInterceptor</tt>
 * <p>
 * Based on Thrift 0.9.x
 *
 * @author HyunGil Jeong
 * @see com.navercorp.pinpoint.plugin.thrift.interceptor.server.TBaseProcessorProcessInterceptor TBaseProcessorProcessInterceptor
 * @see com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadFieldBeginInterceptor TProtocolReadFieldBeginInterceptor
 * @see com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadTTypeInterceptor TProtocolReadTTypeInterceptor
 * @see com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadMessageEndInterceptor TProtocolReadMessageEndInterceptor
 */
public class ProcessFunctionProcessInterceptor implements AroundInterceptor {

  private final PLogger logger = PLoggerFactory.getLogger(this.getClass());
  private final boolean isDebug = logger.isDebugEnabled();

  private final InterceptorScope scope;

  public ProcessFunctionProcessInterceptor(InterceptorScope scope) {
    this.scope = scope;
  }

  @Override
  public void before(Object target, Object[] args) {
    if (isDebug) {
      logger.beforeInterceptor(target, args);
    }
    // process(int seqid, TProtocol iprot, TProtocol oprot, I iface)
    if (ArrayUtils.getLength(args) != 4) {
      return;
    }
    String methodName = ThriftConstants.UNKNOWN_METHOD_NAME;
    if (target instanceof ProcessFunction) {
      final ProcessFunction<?, ?> processFunction = (ProcessFunction<?, ?>) target;
      methodName = processFunction.getMethodName();
    }
    // Share the client call context (holding the method name) with the other
    // interceptors participating in this scope invocation.
    final InterceptorScopeInvocation currentTransaction = this.scope.getCurrentInvocation();
    final Object attachment = currentTransaction.getOrCreateAttachment(ThriftClientCallContextAttachmentFactory.INSTANCE);
    if (attachment instanceof ThriftClientCallContext) {
      final ThriftClientCallContext clientCallContext = (ThriftClientCallContext) attachment;
      clientCallContext.setMethodName(methodName);
    }
    // Set server marker - server handlers may create a client to call another Thrift server.
    // When this happens, TProtocol interceptors for clients are triggered since technically they're still within THRIFT_SERVER_SCOPE.
    // We set the marker inside server's input protocol to safeguard against such cases.
    final Object iprot = args[1];
    // With the addition of TProtocolDecorator, iprot may actually be a wrapper around the actual input protocol
    final Object rootInputProtocol = getRootInputProtocol(iprot);
    if (validateInputProtocol(rootInputProtocol)) {
      ((ServerMarkerFlagFieldAccessor) rootInputProtocol)._$PINPOINT$_setServerMarkerFlag(true);
    }
  }

  @Override
  public void after(Object target, Object[] args, Object result, Throwable throwable) {
    if (isDebug) {
      logger.afterInterceptor(target, args, result, throwable);
    }
    // Unset server marker.
    // BUGFIX: the condition was inverted (!= 4), so the flag was never cleared on
    // the normal 4-argument path and args[1] could be read for invalid arg lists.
    // Must mirror the guard used in before().
    if (ArrayUtils.getLength(args) == 4) {
      final Object iprot = args[1];
      // Unwrap decorators so we clear the flag on the same (root) protocol
      // instance that before() set it on.
      final Object rootInputProtocol = getRootInputProtocol(iprot);
      if (validateInputProtocol(rootInputProtocol)) {
        ((ServerMarkerFlagFieldAccessor) rootInputProtocol)._$PINPOINT$_setServerMarkerFlag(false);
      }
    }
  }

  /** Follows TProtocol field getters down to the innermost (root) protocol. */
  private Object getRootInputProtocol(Object iprot) {
    if (iprot instanceof TProtocolFieldGetter) {
      return getRootInputProtocol(((TProtocolFieldGetter) iprot)._$PINPOINT$_getTProtocol());
    } else {
      return iprot;
    }
  }

  /**
   * Returns true only when the object is a TProtocol that was instrumented
   * with the server-marker field accessor.
   */
  private boolean validateInputProtocol(Object iprot) {
    if (iprot instanceof TProtocol) {
      if (iprot instanceof ServerMarkerFlagFieldAccessor) {
        return true;
      } else {
        if (isDebug) {
          logger.debug("Invalid target object. Need field accessor({}).", ServerMarkerFlagFieldAccessor.class.getName());
        }
        return false;
      }
    }
    return false;
  }
}
| {
"content_hash": "ef2b2c0d209980e2835292564f66d8bf",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 159,
"avg_line_length": 48.80530973451327,
"alnum_prop": 0.7107887579329103,
"repo_name": "minwoo-jung/pinpoint",
"id": "a5ad60e59a789b442e67af226dbd10bb90c72bec",
"size": "6123",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "plugins/thrift/src/main/java/com/navercorp/pinpoint/plugin/thrift/interceptor/server/ProcessFunctionProcessInterceptor.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "200"
},
{
"name": "CSS",
"bytes": "219960"
},
{
"name": "Groovy",
"bytes": "1423"
},
{
"name": "HTML",
"bytes": "204583"
},
{
"name": "Java",
"bytes": "19697025"
},
{
"name": "JavaScript",
"bytes": "6172"
},
{
"name": "Kotlin",
"bytes": "1327"
},
{
"name": "Shell",
"bytes": "6420"
},
{
"name": "TSQL",
"bytes": "978"
},
{
"name": "Thrift",
"bytes": "15920"
},
{
"name": "TypeScript",
"bytes": "1686957"
}
],
"symlink_target": ""
} |
// Compile-time checks that glm's sized signed integer typedefs have the
// advertised widths and match the corresponding built-in C++ types.
static_assert(sizeof(glm::int8) == 1, "int8 size isn't 1 byte on this platform");
static_assert(sizeof(glm::int16) == 2, "int16 size isn't 2 bytes on this platform");
static_assert(sizeof(glm::int32) == 4, "int32 size isn't 4 bytes on this platform");
static_assert(sizeof(glm::int64) == 8, "int64 size isn't 8 bytes on this platform");
// FIX: the message previously said "isn't 4 bytes" although the assertion checks for 2.
static_assert(sizeof(glm::int16) == sizeof(short), "signed short size isn't 2 bytes on this platform");
static_assert(sizeof(glm::int32) == sizeof(int), "signed int size isn't 4 bytes on this platform");
#endif
// Run-time verification that each sized integer typedef has the expected
// byte width. Returns the number of failed checks (0 when all pass).
static int test_size()
{
	int Error = 0;

	if(sizeof(glm::int8) != 1)
		++Error;
	if(sizeof(glm::int16) != 2)
		++Error;
	if(sizeof(glm::int32) != 4)
		++Error;
	if(sizeof(glm::int64) != 8)
		++Error;

	return Error;
}
// Checks the strict ordering of widths: int8 < int16 < int32 < int64.
// Returns the number of failed comparisons (0 when all pass).
static int test_comp()
{
	int Error = 0;

	if(!(sizeof(glm::int8) < sizeof(glm::int16)))
		++Error;
	if(!(sizeof(glm::int16) < sizeof(glm::int32)))
		++Error;
	if(!(sizeof(glm::int32) < sizeof(glm::int64)))
		++Error;

	return Error;
}
int main()
{
int Error = 0;
Error += test_size();
Error += test_comp();
return Error;
}
| {
"content_hash": "a2418f24ee03dfc481bc76e7e8f8dd1d",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 104,
"avg_line_length": 27.95,
"alnum_prop": 0.6332737030411449,
"repo_name": "madeso/ride",
"id": "b55c6ca7f11fba0eb61426710b4bbe9434a58313",
"size": "1185",
"binary": false,
"copies": "15",
"ref": "refs/heads/main",
"path": "external/glm/test/ext/ext_scalar_int_sized.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "11503"
},
{
"name": "C++",
"bytes": "1061575"
},
{
"name": "CMake",
"bytes": "38450"
},
{
"name": "Lua",
"bytes": "1012"
},
{
"name": "Python",
"bytes": "310"
},
{
"name": "Rust",
"bytes": "2815"
}
],
"symlink_target": ""
} |
package org.apache.distributedlog.common.util;
/**
 * Permit.
 *
 * <p>Handle for a granted permit; invoke {@link #release()} to hand the permit
 * back to its issuer when done. Being a {@link FunctionalInterface}, the
 * release action can be supplied as a lambda or method reference.</p>
 */
@FunctionalInterface
public interface Permit {

    /**
     * Releases this permit back to its issuer.
     */
    void release();
}
| {
"content_hash": "7c948a437617be4bd7a86c81d89a1c5e",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 46,
"avg_line_length": 11.583333333333334,
"alnum_prop": 0.697841726618705,
"repo_name": "apache/bookkeeper",
"id": "24cb63dfd58714304e9f33a7d4ae31984f0edac3",
"size": "945",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "stream/distributedlog/common/src/main/java/org/apache/distributedlog/common/util/Permit.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11886"
},
{
"name": "C++",
"bytes": "17844"
},
{
"name": "Dockerfile",
"bytes": "11186"
},
{
"name": "Groovy",
"bytes": "48262"
},
{
"name": "Java",
"bytes": "15908174"
},
{
"name": "JavaScript",
"bytes": "12042"
},
{
"name": "Makefile",
"bytes": "6544"
},
{
"name": "Python",
"bytes": "215336"
},
{
"name": "Roff",
"bytes": "39396"
},
{
"name": "SCSS",
"bytes": "1345"
},
{
"name": "Shell",
"bytes": "183376"
},
{
"name": "Thrift",
"bytes": "1473"
}
],
"symlink_target": ""
} |
#region Copyright (c) 2003-2005, Luke T. Maxon
#endregion
using Xunit.Extensions.Forms.TestApplications;
using Xunit;
using System.ComponentModel;
namespace Xunit.Extensions.Forms.Recorder.Test
{
///<summary>
/// Test fixture for the <see cref="MenuItemRecorder"/>.
///</summary>
[Category("Recorder")]
public class MenuItemRecorderTest : XunitFormTest
{
    [Fact]
    public void MenuItemClick()
    {
        // Arrange: show a form whose context menu contains the "Click To Count" item.
        ContextMenuTestForm form = new ContextMenuTestForm();
        form.Show();
        // The recorder's TestWriter starts out with no generated test code.
        TestWriter writer = new TestWriter(form);
        Assert.Equal("", writer.Test);
        // Act: click the menu item through its tester; the recorder observes the click.
        MenuItemTester menuItem = new MenuItemTester("Click To Count");
        menuItem.Click();
        // Assert: the recorder emitted a complete xUnit test that replays the click.
        Assert.Equal(
            @"[Fact]
public void Test()
{
    MenuItemTester ClickToCount = new MenuItemTester(""Click To Count"");
    ClickToCount.Click();
}",
            writer.Test);
    }
}
} | {
"content_hash": "8bbdd9b73b3315079ad7002a0a4a5110",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 75,
"avg_line_length": 22.466666666666665,
"alnum_prop": 0.5766567754698319,
"repo_name": "ChrisPelatari/XunitForms",
"id": "b3a899324ff7670ed7852d6968ce6e7ea4ae92eb",
"size": "2801",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/XunitForms.Test/Recorder/MenuItemRecorderTest.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "41"
},
{
"name": "C#",
"bytes": "1078693"
}
],
"symlink_target": ""
} |
package com.github.daytron.revworks.view.main;
import com.github.daytron.revworks.data.ExternalLink;
import com.vaadin.server.ExternalResource;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.Link;
import com.vaadin.ui.VerticalLayout;
/**
 * UI widget component for dashboard footer.
 *
 * @author Ryan Gilera
 */
@SuppressWarnings("serial")
public class FooterComponent extends VerticalLayout {

    /**
     * Builds the footer UI (primary links row plus credits row) on creation.
     */
    public FooterComponent() {
        setWidth("100%");
        setSpacing(true);
        setStyleName("dashboard-footer");

        addComponent(createPrimaryLinks());
        addComponent(createBottomRow());
    }

    /**
     * Builds the top row of the footer holding the primary external links,
     * centred by stretchable filler labels on either side.
     *
     * @return HorizontalLayout object
     */
    private HorizontalLayout createPrimaryLinks() {
        final HorizontalLayout row = new HorizontalLayout();
        row.setWidth("100%");
        row.setSpacing(true);

        final Label leftFiller = new Label("&nbsp;");
        leftFiller.setSizeFull();
        row.addComponent(leftFiller);

        final Link gsmLondonLink = newExternalLink(ExternalLink.GSM_LONDON);
        final Link studentPortalLink = newExternalLink(ExternalLink.STUDENT_PORTAL);
        final Link lecturerPortalLink = newExternalLink(ExternalLink.LECTURER_PORTAL);
        final Link gsmLearnLink = newExternalLink(ExternalLink.GSM_LEARN);
        row.addComponents(gsmLondonLink, studentPortalLink,
                lecturerPortalLink, gsmLearnLink);

        final Label rightFiller = new Label("&nbsp;");
        rightFiller.setSizeFull();
        row.addComponent(rightFiller);

        // Fillers absorb the remaining width so the links stay centred.
        row.setExpandRatio(leftFiller, 1);
        row.setExpandRatio(rightFiller, 1);

        return row;
    }

    /**
     * Creates an undefined-size link for the given external destination that
     * opens in a new browser tab.
     *
     * @param destination the predefined external link data
     * @return Link object
     */
    private Link newExternalLink(ExternalLink destination) {
        final Link link = new Link(destination.getName(),
                new ExternalResource(destination.getLink()));
        link.setTargetName("_blank");
        link.setSizeUndefined();
        return link;
    }

    /**
     * Builds the bottom row of the footer with the copyright notice and
     * links to the author's GitHub and LinkedIn profiles.
     *
     * @return HorizontalLayout object
     */
    private HorizontalLayout createBottomRow() {
        final HorizontalLayout row = new HorizontalLayout();
        row.setWidth("100%");

        final Label leadingFiller = new Label("&nbsp;");
        leadingFiller.setSizeFull();

        final Label copyrightLabel = new Label();
        copyrightLabel.setValue("All Rights Reserved 2015. Created by Ryan Gilera [");
        copyrightLabel.setSizeUndefined();

        final Link githubLink = newExternalLink(ExternalLink.MY_GITHUB_PAGE);

        final Label separatorLabel = new Label("] [");
        separatorLabel.setSizeUndefined();

        final Link linkedinLink = newExternalLink(ExternalLink.MY_LINKEDIN_PAGE);

        final Label closingLabel = new Label("]");
        closingLabel.setSizeFull();

        row.addComponents(leadingFiller, copyrightLabel, githubLink,
                separatorLabel, linkedinLink, closingLabel);
        // Stretch the outer labels so the credits line sits centred.
        row.setExpandRatio(leadingFiller, 1);
        row.setExpandRatio(closingLabel, 1);

        return row;
    }
}
| {
"content_hash": "ad1ad5e9febe0867399526972a5bb13f",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 94,
"avg_line_length": 36.23076923076923,
"alnum_prop": 0.6869544703939608,
"repo_name": "Daytron/revworks",
"id": "cf9b801c066e0f003d1cc37f2bfb7884a4191ea9",
"size": "4835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/github/daytron/revworks/view/main/FooterComponent.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19132"
},
{
"name": "Java",
"bytes": "504632"
}
],
"symlink_target": ""
} |
{# ------------------------------------------------------- #}
{# INDIVIDUAL VIEW FOR EACH articles #}
{# This page can use any data from http://localhost:2000/cms/#/form/articles/ #}
{# Webhook uses the SWIG.js (like Django/Twig) templating system. Their documentation is here: #}
{# http://paularmstrong.github.io/swig/docs/tags/ #}
{# Learn about calling data into Webhook pages here: #}
{# http://www.webhook.com/docs/template-rules-and-filters/ #}
{# ------------------------------------------------------- #}
{# Confused what extends and blocks do? Watch a primer: #}
{# http://www.webhook.com/docs/template-inheritance-blocks/ #}
{% extends "templates/partials/base.html" %}

{# This sets our page <title>. It will append this article's name to the site title defined in base.html #}
{% block title %}{{ item.name }}{% endblock %}

{# Emits a canonical URL for this article, built from its slug. #}
{% block head_extra %}
<link rel="canonical" href="https://beliefmap.org/{{ item.slug }}" />
{% endblock %}

{# Page body: article heading plus the Markdown-rendered article text. #}
{% block content %}
<div class="container-fluid margin-wrapper">
  <h1 class="page-header text-center">{{ item.name }}</h1>
  {{ item.article_body|markdown }}
</div>
{% endblock %}
| {
"content_hash": "296aad746d4b943a626e86648d928784",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 108,
"avg_line_length": 45.2,
"alnum_prop": 0.6132743362831858,
"repo_name": "Treesearch/Treesearch",
"id": "e862aa5b5d306cda9b053a3804c9a5fd108f7181",
"size": "1130",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "templates/pages/individual.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "45466"
},
{
"name": "HTML",
"bytes": "3376897"
},
{
"name": "JavaScript",
"bytes": "140421"
}
],
"symlink_target": ""
} |
// <copyright file="NativeEnums.cs" company="Nick Lowe">
// Copyright © Nick Lowe 2009
// </copyright>
// <author>Nick Lowe</author>
// <email>[email protected]</email>
// <url>http://processprivileges.codeplex.com/</url>
namespace System.Security.Processes
{
using System;
using System.Diagnostics.CodeAnalysis;
    /// <summary>
    /// <para>Privilege attributes that augment a <see cref="Privilege"/> with state information.</para>
    /// </summary>
    /// <remarks>
    /// <para>Use the following checks to interpret privilege attributes:</para>
    /// <para>
    /// <c>// Privilege is disabled.<br/>if (attributes == PrivilegeAttributes.Disabled) { /* ... */ }</c>
    /// </para>
    /// <para>
    /// <c>// Privilege is enabled.<br/>if ((attributes &amp; PrivilegeAttributes.Enabled) == PrivilegeAttributes.Enabled) { /* ... */ }</c>
    /// </para>
    /// <para>
    /// <c>// Privilege is removed.<br/>if ((attributes &amp; PrivilegeAttributes.Removed) == PrivilegeAttributes.Removed) { /* ... */ }</c>
    /// </para>
    /// <para>To avoid having to work with a flags based enumerated type, use <see cref="ProcessExtensions.GetPrivilegeState(PrivilegeAttributes)"/> on attributes.</para>
    /// </remarks>
    [Flags,
    SuppressMessage(
        "Microsoft.Design",
        "CA1008:EnumsShouldHaveZeroValue",
        Justification = "Native enum."),
    SuppressMessage(
        "Microsoft.Usage",
        "CA2217:DoNotMarkEnumsWithFlags",
        Justification = "Native enum.")]
    public enum PrivilegeAttributes
    {
        /// <summary>Privilege is disabled.</summary>
        Disabled = 0,

        /// <summary>Privilege is enabled by default.</summary>
        EnabledByDefault = 1,

        /// <summary>Privilege is enabled.</summary>
        Enabled = 2,

        /// <summary>Privilege is removed.</summary>
        Removed = 4,

        /// <summary>Privilege used to gain access to an object or service.</summary>
        // -2147483648 == 0x80000000: occupies the sign bit of the 32-bit attribute mask.
        UsedForAccess = -2147483648
    }
    /// <summary>Access rights for access tokens.</summary>
    // Values mirror the native token access-right constants; combined members are
    // composed from AccessTypeMasks plus the specific token rights below.
    [Flags,
    SuppressMessage(
        "Microsoft.Design",
        "CA1008:EnumsShouldHaveZeroValue",
        Justification = "Native enum."),
    SuppressMessage("Microsoft.Usage",
        "CA2217:DoNotMarkEnumsWithFlags",
        Justification = "Native enum.")]
    public enum TokenAccessRights
    {
        /// <summary>Right to attach a primary token to a process.</summary>
        AssignPrimary = 0x0001,

        /// <summary>Right to duplicate an access token.</summary>
        Duplicate = 0x0002,

        /// <summary>Right to attach an impersonation access token to a process.</summary>
        Impersonate = 0x0004,

        /// <summary>Right to query an access token.</summary>
        Query = 0x0008,

        /// <summary>Right to query the source of an access token.</summary>
        QuerySource = 0x0010,

        /// <summary>Right to enable or disable the privileges in an access token.</summary>
        AdjustPrivileges = 0x0020,

        /// <summary>Right to adjust the attributes of the groups in an access token.</summary>
        AdjustGroups = 0x0040,

        /// <summary>Right to change the default owner, primary group, or DACL of an access token.</summary>
        AdjustDefault = 0x0080,

        /// <summary>Right to adjust the session ID of an access token.</summary>
        AdjustSessionId = 0x0100,

        /// <summary>Combines all possible access rights for a token.</summary>
        AllAccess = AccessTypeMasks.StandardRightsRequired |
            AssignPrimary |
            Duplicate |
            Impersonate |
            Query |
            QuerySource |
            AdjustPrivileges |
            AdjustGroups |
            AdjustDefault |
            AdjustSessionId,

        /// <summary>Combines the standard rights required to read with <see cref="Query"/>.</summary>
        Read = AccessTypeMasks.StandardRightsRead |
            Query,

        /// <summary>Combines the standard rights required to write with <see cref="AdjustDefault"/>, <see cref="AdjustGroups"/> and <see cref="AdjustPrivileges"/>.</summary>
        Write = AccessTypeMasks.StandardRightsWrite |
            AdjustPrivileges |
            AdjustGroups |
            AdjustDefault,

        /// <summary>Combines the standard rights required to execute with <see cref="Impersonate"/>.</summary>
        Execute = AccessTypeMasks.StandardRightsExecute |
            Impersonate
    }
    /// <summary>
    /// Standard Windows access-mask bits (standard-rights region) used to compose
    /// <see cref="TokenAccessRights"/> values.
    /// </summary>
    [Flags]
    internal enum AccessTypeMasks
    {
        Delete = 65536,                  // 0x00010000
        ReadControl = 131072,            // 0x00020000
        WriteDAC = 262144,               // 0x00040000
        WriteOwner = 524288,             // 0x00080000
        Synchronize = 1048576,           // 0x00100000
        StandardRightsRequired = 983040, // 0x000F0000 = Delete | ReadControl | WriteDAC | WriteOwner
        StandardRightsRead = ReadControl,
        StandardRightsWrite = ReadControl,
        StandardRightsExecute = ReadControl,
        StandardRightsAll = 2031616,     // 0x001F0000 (standard rights plus Synchronize)
        SpecificRightsAll = 65535        // 0x0000FFFF
    }
    /// <summary>
    /// Mirrors the native TOKEN_INFORMATION_CLASS enumeration used with
    /// GetTokenInformation/SetTokenInformation.
    /// NOTE: members carry implicit values (None = 0, TokenUser = 1, ...), so
    /// the declaration order must match the native ordering exactly - do not reorder.
    /// </summary>
    internal enum TokenInformationClass
    {
        // Placeholder so TokenUser starts at 1, matching the native enumeration.
        None,
        TokenUser,
        TokenGroups,
        TokenPrivileges,
        TokenOwner,
        TokenPrimaryGroup,
        TokenDefaultDacl,
        TokenSource,
        TokenType,
        TokenImpersonationLevel,
        TokenStatistics,
        TokenRestrictedSids,
        TokenSessionId,
        TokenGroupsAndPrivileges,
        TokenSessionReference,
        TokenSandBoxInert,
        TokenAuditPolicy,
        TokenOrigin,
        TokenElevationType,
        TokenLinkedToken,
        TokenElevation,
        TokenHasRestrictions,
        TokenAccessInformation,
        TokenVirtualizationAllowed,
        TokenVirtualizationEnabled,
        TokenIntegrityLevel,
        TokenUIAccess,
        TokenMandatoryPolicy,
        TokenLogonSid,
        // Sentinel marking the end of the valid range.
        MaxTokenInfoClass
    }
    /// <summary>
    /// Process-creation flags passed in the dwCreationFlags parameter of the
    /// native CreateProcess* family of functions.
    /// </summary>
    [Flags]
    enum CREATE_PROCESS_FLAGS
    {
        CREATE_BREAKAWAY_FROM_JOB = 0x01000000,
        CREATE_DEFAULT_ERROR_MODE = 0x04000000,
        CREATE_NEW_CONSOLE = 0x00000010,
        CREATE_NEW_PROCESS_GROUP = 0x00000200,
        CREATE_NO_WINDOW = 0x08000000,
        CREATE_PROTECTED_PROCESS = 0x00040000,
        CREATE_PRESERVE_CODE_AUTHZ_LEVEL = 0x02000000,
        CREATE_SEPARATE_WOW_VDM = 0x00000800,
        CREATE_SHARED_WOW_VDM = 0x00001000,
        // Primary thread starts suspended until ResumeThread is called.
        CREATE_SUSPENDED = 0x00000004,
        CREATE_UNICODE_ENVIRONMENT = 0x00000400,
        DEBUG_ONLY_THIS_PROCESS = 0x00000002,
        DEBUG_PROCESS = 0x00000001,
        DETACHED_PROCESS = 0x00000008,
        // Signals that a STARTUPINFOEX structure is being passed.
        EXTENDED_STARTUPINFO_PRESENT = 0x00080000,
        INHERIT_PARENT_AFFINITY = 0x00010000
    }
    /// <summary>
    /// Logon types accepted by the native LogonUser function.
    /// Values start at 2 and skip 6, matching the native constants.
    /// </summary>
    public enum LogonType
    {
        /// <summary>
        /// This logon type is intended for users who will be interactively using the computer, such as a user being logged on
        /// by a terminal server, remote shell, or similar process.
        /// This logon type has the additional expense of caching logon information for disconnected operations;
        /// therefore, it is inappropriate for some client/server applications,
        /// such as a mail server.
        /// </summary>
        LOGON32_LOGON_INTERACTIVE = 2,

        /// <summary>
        /// This logon type is intended for high performance servers to authenticate plaintext passwords.
        /// The LogonUser function does not cache credentials for this logon type.
        /// </summary>
        LOGON32_LOGON_NETWORK = 3,

        /// <summary>
        /// This logon type is intended for batch servers, where processes may be executing on behalf of a user without
        /// their direct intervention. This type is also for higher performance servers that process many plaintext
        /// authentication attempts at a time, such as mail or Web servers.
        /// The LogonUser function does not cache credentials for this logon type.
        /// </summary>
        LOGON32_LOGON_BATCH = 4,

        /// <summary>
        /// Indicates a service-type logon. The account provided must have the service privilege enabled.
        /// </summary>
        LOGON32_LOGON_SERVICE = 5,

        /// <summary>
        /// This logon type is for GINA DLLs that log on users who will be interactively using the computer.
        /// This logon type can generate a unique audit record that shows when the workstation was unlocked.
        /// </summary>
        LOGON32_LOGON_UNLOCK = 7,

        /// <summary>
        /// This logon type preserves the name and password in the authentication package, which allows the server to make
        /// connections to other network servers while impersonating the client. A server can accept plaintext credentials
        /// from a client, call LogonUser, verify that the user can access the system across the network, and still
        /// communicate with other servers.
        /// NOTE: Windows NT: This value is not supported.
        /// </summary>
        LOGON32_LOGON_NETWORK_CLEARTEXT = 8,

        /// <summary>
        /// This logon type allows the caller to clone its current token and specify new credentials for outbound connections.
        /// The new logon session has the same local identifier but uses different credentials for other network connections.
        /// NOTE: This logon type is supported only by the LOGON32_PROVIDER_WINNT50 logon provider.
        /// NOTE: Windows NT: This value is not supported.
        /// </summary>
        LOGON32_LOGON_NEW_CREDENTIALS = 9,
    }
    /// <summary>
    /// Logon providers accepted by the native LogonUser function.
    /// </summary>
    public enum LogonProvider
    {
        /// <summary>
        /// Use the standard logon provider for the system.
        /// The default security provider is negotiate, unless you pass NULL for the domain name and the user name
        /// is not in UPN format. In this case, the default provider is NTLM.
        /// NOTE: Windows 2000/NT: The default security provider is NTLM.
        /// </summary>
        LOGON32_PROVIDER_DEFAULT = 0,

        /// <summary>Use the Windows NT 3.5x logon provider.</summary>
        LOGON32_PROVIDER_WINNT35 = 1,

        /// <summary>Use the NTLM logon provider (Windows NT 4.0).</summary>
        LOGON32_PROVIDER_WINNT40 = 2,

        /// <summary>Use the negotiate logon provider (Windows 2000 and later).</summary>
        LOGON32_PROVIDER_WINNT50 = 3
    }
    /// <summary>
    /// STARTUPINFO.dwFlags bits indicating which STARTUPINFO members are used
    /// when a process is created.
    /// </summary>
    [Flags]
    public enum STARTF : uint
    {
        STARTF_USESHOWWINDOW = 0x00000001,
        STARTF_USESIZE = 0x00000002,
        STARTF_USEPOSITION = 0x00000004,
        STARTF_USECOUNTCHARS = 0x00000008,
        STARTF_USEFILLATTRIBUTE = 0x00000010,
        STARTF_RUNFULLSCREEN = 0x00000020,  // ignored for non-x86 platforms
        STARTF_FORCEONFEEDBACK = 0x00000040,
        STARTF_FORCEOFFFEEDBACK = 0x00000080,
        // Use the hStdInput/hStdOutput/hStdError members for redirection.
        STARTF_USESTDHANDLES = 0x00000100,
    }
    /// <summary>
    /// Show-state commands (nCmdShow values) for the native ShowWindow function.
    /// Several names intentionally share a value (e.g. SW_SHOWNORMAL/SW_NORMAL).
    /// </summary>
    public enum ShowWindowCommands : uint
    {
        /// <summary>
        /// Hides the window and activates another window.
        /// </summary>
        SW_HIDE = 0,

        /// <summary>
        /// Activates and displays a window. If the window is minimized or maximized, the system restores it to its original size and position. An application should specify this flag when displaying the window for the first time.
        /// </summary>
        SW_SHOWNORMAL = 1,

        /// <summary>
        /// Activates and displays a window. If the window is minimized or maximized, the system restores it to its original size and position. An application should specify this flag when displaying the window for the first time.
        /// </summary>
        SW_NORMAL = 1,

        /// <summary>
        /// Activates the window and displays it as a minimized window.
        /// </summary>
        SW_SHOWMINIMIZED = 2,

        /// <summary>
        /// Activates the window and displays it as a maximized window.
        /// </summary>
        SW_SHOWMAXIMIZED = 3,

        /// <summary>
        /// Maximizes the specified window.
        /// </summary>
        SW_MAXIMIZE = 3,

        /// <summary>
        /// Displays a window in its most recent size and position. This value is similar to <see cref="ShowWindowCommands.SW_SHOWNORMAL"/>, except the window is not activated.
        /// </summary>
        SW_SHOWNOACTIVATE = 4,

        /// <summary>
        /// Activates the window and displays it in its current size and position.
        /// </summary>
        SW_SHOW = 5,

        /// <summary>
        /// Minimizes the specified window and activates the next top-level window in the z-order.
        /// </summary>
        SW_MINIMIZE = 6,

        /// <summary>
        /// Displays the window as a minimized window. This value is similar to <see cref="ShowWindowCommands.SW_SHOWMINIMIZED"/>, except the window is not activated.
        /// </summary>
        SW_SHOWMINNOACTIVE = 7,

        /// <summary>
        /// Displays the window in its current size and position. This value is similar to <see cref="ShowWindowCommands.SW_SHOW"/>, except the window is not activated.
        /// </summary>
        SW_SHOWNA = 8,

        /// <summary>
        /// Activates and displays the window. If the window is minimized or maximized, the system restores it to its original size and position. An application should specify this flag when restoring a minimized window.
        /// </summary>
        SW_RESTORE = 9
    }
} | {
"content_hash": "408db9bd2d1450c449f9d798e311bead",
"timestamp": "",
"source": "github",
"line_count": 347,
"max_line_length": 237,
"avg_line_length": 38.46685878962536,
"alnum_prop": 0.6114773748876237,
"repo_name": "FFFF0h/RunPSScript",
"id": "94049e8ba9f01525cc8773db5492827959d55bb8",
"size": "13351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RunPSScript/Security/Processes/NativeEnums.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1308"
},
{
"name": "C#",
"bytes": "714428"
},
{
"name": "PowerShell",
"bytes": "1601"
}
],
"symlink_target": ""
} |
package com.tacticalnuclearstrike.tttumblr.activites;
import java.io.File;
import java.io.FileNotFoundException;
import android.app.Activity;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import com.google.android.apps.analytics.GoogleAnalyticsTracker;
import com.tacticalnuclearstrike.tttumblr.R;
import com.tacticalnuclearstrike.tttumblr.TumblrApi;
import com.tacticalnuclearstrike.tttumblr.TumblrService;
public class UploadImageActivity extends PostActivity {
	private static final String TAG = "UploadImageActivity";

	// URI of the image to post: camera capture output or a gallery selection.
	Uri outputFileUri;

	// Request codes used to distinguish results in onActivityResult().
	int TAKE_PICTURE = 0;
	int SELECT_IMAGE = 1;
	// TumblrApi api;

	// Google Analytics tracker; started in onCreate(), stopped in onDestroy().
	GoogleAnalyticsTracker tracker;

	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		tracker = GoogleAnalyticsTracker.getInstance();
		tracker.start("UA-9100060-3", 20, this);
		// api = new TumblrApi(this);
		setContentView(R.layout.uploadimageview);

		// If launched with a shared image (EXTRA_STREAM), preview it immediately.
		Intent startIntent = getIntent();
		if (startIntent != null && startIntent.getExtras() != null
				&& startIntent.getExtras().containsKey(Intent.EXTRA_STREAM)) {
			Uri startData = (Uri) startIntent.getExtras().get(
					Intent.EXTRA_STREAM);
			Log.d(TAG, "got initial data: " + startData.toString());
			outputFileUri = startData;
			setSelectedImageThumbnail(outputFileUri);
		}
		setupButtons();

		// NOTE(review): this re-reads EXTRA_STREAM for ACTION_SEND launches,
		// duplicating the block above - presumably only one path fires per launch.
		Intent intent = getIntent();
		String action = intent.getAction();
		if (Intent.ACTION_SEND.equals(action)) {
			outputFileUri = (Uri) (intent.getExtras().get(Intent.EXTRA_STREAM));
			setSelectedImageThumbnail(outputFileUri);
		}
	}

	// Wires the three buttons: take a photo, post the image, pick from gallery.
	private void setupButtons() {
		Button btnTakePicture = (Button) findViewById(R.id.btnTakePicture);
		btnTakePicture.setOnClickListener(new View.OnClickListener() {
			public void onClick(View v) {
				tracker.trackPageView("/UploadImageActivity/TakePhoto");
				takePhoto();
			}
		});
		Button btnPostPhoto = (Button) findViewById(R.id.btnPostImage);
		btnPostPhoto.setOnClickListener(new View.OnClickListener() {
			public void onClick(View v) {
				uploadImage();
			}
		});
		Button btnSelectImage = (Button) findViewById(R.id.btnSelectImage);
		btnSelectImage.setOnClickListener(new View.OnClickListener() {
			public void onClick(View v) {
				tracker.trackPageView("/UploadImageActivity/SelectImage");
				selectImage();
			}
		});
	}

	// Launches the system camera, directing its output to a fixed temp file
	// ("test.jpg" on external storage); the result arrives in onActivityResult().
	private void takePhoto() {
		Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
		File file = new File(Environment.getExternalStorageDirectory(),
				"test.jpg");
		outputFileUri = Uri.fromFile(file);
		intent.putExtra(MediaStore.EXTRA_OUTPUT, outputFileUri);
		startActivityForResult(intent, TAKE_PICTURE);
	}

	@Override
	protected void onDestroy() {
		super.onDestroy();
		// Close the analytics session opened in onCreate().
		tracker.stop();
	}

	@Override
	protected void onActivityResult(int requestCode, int resultCode, Intent data) {
		if (resultCode != Activity.RESULT_OK)
			return;
		if (requestCode == TAKE_PICTURE) {
			try {
				// Register the captured file with the MediaStore and switch
				// outputFileUri to the resulting content:// URI.
				File f = new File(outputFileUri.getPath());
				outputFileUri = Uri
						.parse(android.provider.MediaStore.Images.Media
								.insertImage(getContentResolver(),
										f.getAbsolutePath(), null, null));
				// f.delete();
				setSelectedImageThumbnail(outputFileUri);
			} catch (FileNotFoundException e) {
				e.printStackTrace();
			} catch (Exception e) {
				e.printStackTrace();
			}
		} else if (requestCode == SELECT_IMAGE) {
			// Gallery selection: the picked image URI comes back in the intent data.
			outputFileUri = data.getData();
			setSelectedImageThumbnail(outputFileUri);
		}
	}

	// Shows the chosen image in the preview ImageView; failures (including
	// OutOfMemoryError on large images) are logged and leave the preview unchanged.
	private void setSelectedImageThumbnail(Uri image) {
		try {
			ImageView iv = (ImageView) findViewById(R.id.selectedImage);
			try {
				iv.setImageURI(image);
			} catch (OutOfMemoryError ome) {
				Log.e("ttTumblr", ome.getMessage());
			}
			iv.setScaleType(ImageView.ScaleType.FIT_CENTER);
			iv.invalidate();
		} catch (Exception e) {
			Log.d("ttTumblr", e.getMessage());
		}
	}

	// Resolves a content:// URI to a filesystem path via the MediaStore.
	// NOTE(review): returns "" on any failure, so callers may pass an empty
	// path downstream - confirm TumblrService tolerates this.
	private String getRealPathFromURI(Uri contentUri) {
		try {
			String[] proj = { MediaStore.Images.Media.DATA };
			Cursor cursor = managedQuery(contentUri, proj, null, null, null);
			int column_index = cursor
					.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
			cursor.moveToFirst();
			return cursor.getString(column_index);
		} catch (Exception ex) {
			return "";
		}
	}

	// Hands the selected image, its caption and the post options to
	// TumblrService for background upload, then finishes this activity.
	private void uploadImage() {
		if (outputFileUri == null) {
			Toast.makeText(this, "No image to upload!", Toast.LENGTH_SHORT)
					.show();
			return;
		}
		EditText text = (EditText) findViewById(R.id.tbImageCaption);
		final String caption = text.getText().toString();
		Intent uploadIntent = new Intent(TumblrService.ACTION_POST_PHOTO);
		uploadIntent.putExtra("photo", getRealPathFromURI(outputFileUri));
		uploadIntent.putExtra("caption", caption);
		uploadIntent.putExtra("options", mPostOptions);
		startService(uploadIntent);
		setResult(RESULT_OK);
		finish();
	}

	// Opens the system chooser to pick an image from the gallery.
	private void selectImage() {
		Intent intent = new Intent();
		intent.setType("image/*");
		intent.setAction(Intent.ACTION_GET_CONTENT);
		startActivityForResult(Intent.createChooser(intent, "Select Picture"),
				SELECT_IMAGE);
	}
}
| {
"content_hash": "ac285d571b635bcd2337e3922907dd21",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 80,
"avg_line_length": 28.329787234042552,
"alnum_prop": 0.7264363499812242,
"repo_name": "murat8505/android-ttTumblr",
"id": "d06285755220774765469e23d812277fbe44e4ab",
"size": "5326",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/com/tacticalnuclearstrike/tttumblr/activites/UploadImageActivity.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "63819"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Index Fungorum
#### Published in
null
#### Original name
Pucciniastrum crawfordiae-japonicae Hirats. f.
### Remarks
null | {
"content_hash": "b6d685bc1bf406c87860f60d6e3f504c",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 46,
"avg_line_length": 11.615384615384615,
"alnum_prop": 0.7284768211920529,
"repo_name": "mdoering/backbone",
"id": "0bdcf8da35aea81ec383f0cd794fba914f4ceb17",
"size": "226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Basidiomycota/Pucciniomycetes/Pucciniales/Uredo/Uredo crawfurdiae-japonicae/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""
Takes care of reading from BAM files and scattering (parallelizing) pipelines
See docs/bamtoolz.md for algorithm details"""
from collections import OrderedDict
import time
from multiprocessing import Process, Queue
import queue
import pysam
import cytoolz.curried as cyt
import logging
logger = logging.getLogger(__name__)
def get_seq_dict(bam_fname):
  """Build a mapping from numeric reference_id to sequence name for a BAM file.

  :param bam_fname: path to the BAM file
  :return: dict mapping reference_id -> sequence name; -1 maps to '*' (unmapped)
  """
  bam_fp = pysam.AlignmentFile(bam_fname)
  seq_names = {}
  for ref_id, sq_entry in enumerate(bam_fp.header['SQ']):
    seq_names[ref_id] = sq_entry['SN']
  seq_names[-1] = '*'
  return seq_names
def read_bam_st(bam_fname):
  """Single-threaded iterator over single reads in a BAM file.

  Secondary (0x100) and supplementary (0x800) alignments are skipped.

  :param bam_fname: path to the BAM file
  :return: iterator over single read tuples (read,)
  """
  bam_fp = pysam.AlignmentFile(bam_fname)
  for rd in bam_fp.fetch(until_eof=True):
    # 0b100100000000 combines the secondary and supplementary flag bits
    if not (rd.flag & 0b100100000000):
      yield (rd,)
def read_bam_paired_st(bam_fname):
  """Single-threaded iterator over read pairs in a BAM file.

  Reads are buffered by (truncated) query name until their mate arrives;
  leftover unpaired reads are logged as errors at the end.

  :param bam_fname: path to the BAM file
  :return: iterator over paired read tuples (read1, read2)
  """
  pending = {}  # qname-key -> first mate seen so far
  for rd in pysam.AlignmentFile(bam_fname).fetch(until_eof=True):
    if rd.flag & 0b100100000000:  # secondary/supplementary alignments
      continue
    key = rd.qname[:20]  # Is this enough?
    mate = pending.pop(key, None)
    if mate is None:
      pending[key] = rd
    else:  # You complete me
      yield (rd, mate) if rd.is_read1 else (mate, rd)
  if len(pending):
    logger.error('{} unpaired reads left over!'.format(len(pending)))
    logger.error(pending.keys())
def read_iter(fp, contig_q):
  """Returns read objects from contigs until someone passes None as a contig

  :param fp: BAM file pointer (pysam.AlignmentFile)
  :param contig_q: a queue into which we put contig information
                   (contig, eof_true) - eof_true is set if this is
                   the last non-empty contig and we want to pull out
                   all the trailing unmapped reads right after the contig
  :return: a generator
  """
  # iter(get, None): keep pulling work items until a None sentinel arrives.
  for contig in iter(contig_q.get, None):
    logger.debug(contig[0])
    for read in fp.fetch(contig[0]):
      if read.flag & 0b100100000000: continue  # Skip supplementary or secondary alignments
      yield read
    if contig[1]:  # Now want the trailing reads - fp is positioned just before them
      # NOTE(review): relies on the preceding fetch(contig) leaving the file
      # pointer just before the trailing unmapped reads; until_eof=True then
      # streams them out. Confirm this holds for the pysam version in use.
      for read in fp.fetch(until_eof=contig[1]):
        if read.flag & 0b100100000000: continue  # Skip supplementary or secondary alignments
        yield read
def unpaired_read_iter(fp, contig_q):
    """Almost identical to read_iter, except it returns tuples (read,)

    This enables us to write processing code that operates both on single
    reads as well as pairs since they both come as tuples

    :param fp: BAM file pointer (pysam.AlignmentFile)
    :param contig_q: a queue into which we put contig information
                     (contig, eof_true) - eof_true is set if this is
                     the last non-empty contig and we want to pull out
                     all the trailing unmapped reads
    :return: a generator that yields (read,) tuples
    """
    return ((read,) for read in read_iter(fp, contig_q))
def paired_read_iter(fp, contig_q, singles_q, max_singles=1000,
                     is_singles_mixer=False, single_src_cnt=None):
    """Yield read pairs from contigs supplied over contig_q, pairing mates that
    arrive separately and exchanging leftover singles via singles_q.

    :param fp: pysam.AlignmentFile()
    :param contig_q: Messages are of the form (ref, eof)
                     ref is the name of the contig to fetch
                     eof is T/F and indicates whether we should fetch till eof
                     the sender should set this to T only if this is the last
                     non-empty contig and is followed by the unmapped reads
                     that sit at the end of the file.
    :param singles_q: Messages are SAM strings of reads converted using tostring()
                      reads are received and mixed in with the from-disk stream
                      if this is the singles mixer, else, unpaired reads are
                      sent to this Q
    :param max_singles: When we have these many singles, start passing them to the
                        singles mixer
    :param is_singles_mixer: Set True if this is also the "singles mixer" that
                             receives unpaired reads from other workers
    :param single_src_cnt: How many processes are out there sending singles?
                           Used if this is a singles mixer
    :return: a generator that yields paired read tuples (read1, read2)
    """
    ref_dict = dict([(r, n) for n, r in enumerate(fp.references)] + [('*', -1)])
    # unmapped with no contig
    ri = read_iter(fp, contig_q)
    singles = OrderedDict()
    while 1:
        if is_singles_mixer:
            try:
                read_str = singles_q.get_nowait()  # Any singles hanging about?
                if read_str is None:  # One process says they are done with singles
                    single_src_cnt -= 1
                    if single_src_cnt == 0:  # Everyone says they've sent all their singles
                        read = None
                    else:
                        continue  # At least one more source of singles about
                else:
                    read = fromstring(read_str, ref_dict)
            except queue.Empty:
                read = next(ri, None)
                if read is None:
                    time.sleep(0.01)  # We are out of contigs and we should keep checking the singles Q
                    continue
        else:
            read = next(ri, None)
        if read is None:  # Out of reads from contigs and, if we are a mixer, out of reads from singles_q
            break
        key = read.qname[:20]  # Is this enough?
        if key not in singles:
            singles[key] = read
        else:  # You complete me
            yield (read, singles[key]) if read.is_read1 else (singles[key], read)
            del singles[key]
        if not is_singles_mixer:
            if len(singles) > max_singles:  # Flush earliest single
                # BUG FIX: popitem() returns a (key, read) tuple, so the read
                # must be extracted before calling .tostring() - the original
                # called .tostring() on the tuple, which raises AttributeError.
                singles_q.put(singles.popitem(last=False)[1].tostring(fp))
    # We need to send the remaining singles to the mixer
    if not is_singles_mixer:
        for read in singles.values():
            singles_q.put(read.tostring(fp))
        singles_q.put(None)  # No more singles from us
def worker(pipeline, bam_fname, result_q, contig_q,
           paired=False, singles_q=None, max_singles=1000,
           is_singles_mixer=False, single_src_cnt=None):
    """Given a pipeline, run it with reads from the given bam taken from contigs supplied
    over the contig_q.

    This expects the pipeline to yield one final result which it can then return.
    It expects the last element of pipeline to be a function that consumes a read iterator and returns a result.
    This is more flexible than you think, since the result can be an iterator, so this can be
    used to filter reads in parallel. See examples in the filter analysis tutorial

    :param pipeline: A list of pipelines
    :param bam_fname: Source BAM file
    :param result_q: The result is put here.
    :param contig_q: messages are of the form (ref, True/False)
                     ref is the name of the contig
                     True/False indicates if eof should be set T/F
                     This controls whether we read to end of file including all the
                     unmapped reads. The caller figures out if this is that last
                     contig that sits just before that tail of unmapped reads at the end
                     of the BAM file
    :param paired: Do we pair the reads before passing them to the pipeline?
    :param singles_q: messages are SAM strings of reads converted using tostring().
                      This is only used/relevant if paired=True because we use that to
                      collect the singles from all contigs and pair them up
    :param max_singles: When we have these many singles, start passing them to the
                        singles mixer
    :param is_singles_mixer: Set True if this is also the "singles mixer" that
                             receives unpaired reads from other workers
    :param single_src_cnt: How many sources of singles we have; used by the
                           singles mixer to know when all senders are done
    :return: None - the pipeline's result is put on result_q
    """
    if paired and singles_q is None:
        raise RuntimeError('Need singles_q to be defined if using paired reads')
    fp = pysam.AlignmentFile(bam_fname)
    # Choose the read source: paired tuples (read1, read2) or single (read,) tuples
    if paired:
        t1 = paired_read_iter(fp, contig_q,
                              singles_q=singles_q, max_singles=max_singles,
                              is_singles_mixer=is_singles_mixer, single_src_cnt=single_src_cnt)
    else:
        t1 = unpaired_read_iter(fp, contig_q)
    # The last pipeline element is the sink that consumes the piped read stream
    # and produces the single result we report back
    sink = pipeline[-1]
    result_q.put(sink(cyt.pipe(t1, *pipeline[:-1])))
def scatter(pipeline, bam_fname, paired=False, ncpus=2, max_singles=1000):
    """Given a pipeline and a source bam file use multiprocessing to run the pipeline
    via multiple workers splitting up the work by contig

    python multiprocessing will be used for running the pipelines in parallel and care
    must be taken to ensure the individual pipeline nodes are parallelizable

    This expects the pipeline to yield one final result which it can then return.
    It expects the last element of pipeline to be a function that consumes a read iterator and returns a result.
    This is more flexible than you think, since the result can be an iterator, so this can be
    used to filter reads in parallel. See examples in the filter analysis tutorial

    :param pipeline: A list of pipelines
    :param bam_fname: Source BAM file
    :param paired: When run in parallel, paired vs unpaired pipelines work differently
                   So we have to tell scatter if we want to source paired or unpaired reads
    :param ncpus: number of worker processes (must be > 1)
    :param max_singles: per-worker singles buffer size before flushing to the mixer
    :return: a generator yielding one result per worker
    """
    assert ncpus > 1, "ncpus = 1 can't use scatter!"
    result_q = Queue()
    contig_q = Queue()
    if paired:
        singles_q = Queue()
        # Exactly one worker doubles as the "singles mixer" that pairs up
        # leftover reads collected from all the other workers
        is_mixer = [False] * (ncpus - 1) + [True]
    else:
        singles_q = None  # not used on the unpaired path
        is_mixer = [False] * ncpus
    p_list = []
    for i in range(ncpus):
        p_list += [
            Process(target=worker,
                    args=(pipeline, bam_fname, result_q, contig_q,
                          paired, singles_q, max_singles,
                          is_mixer[i], ncpus - 1))
        ]
    for p in p_list:
        p.start()

    _contigs = find_non_empty_contigs(bam_fname)
    # ROBUSTNESS FIX: guard against a BAM with no mapped reads - the original
    # indexed _contigs[-1] unconditionally and raised IndexError on an empty list.
    if _contigs:
        # This ensures that we read till EOF for the last contig and thereby
        # fetch all of the trailing unmapped reads
        contigs = [(c, False) for c in _contigs[:-1]] + [(_contigs[-1], True)]
    else:
        contigs = []

    for contig in contigs:
        contig_q.put(contig)

    # Tell child processes to stop
    for i in range(ncpus):
        contig_q.put(None)

    for i in range(ncpus):
        yield result_q.get()

    # Orderly exit
    for p in p_list:
        p.join()
def find_non_empty_contigs(bam_fname):
    """Return the names of references that contain at least one read,
    in the order they appear in the BAM header.

    Thanks to Güneş Bayir for suggesting a proper algorithm to pull the
    unmapped reads.
    """
    fp = pysam.AlignmentFile(bam_fname)
    # A reference is non-empty iff fetching from it yields at least one read
    return [ref for ref in fp.references
            if next(fp.fetch(ref), None) is not None]
def fromstring(s, ref_dict):
    """Inverse of pysam.AlignedSegment.tostring(): given a string, create an aligned segment

    :param s: a SAM-formatted line: 11 fixed tab-separated columns followed by
              optional tags in TAG:TYPE:VALUE form
    :param ref_dict: ref_dict = dict([(r, n) for n, r in enumerate(fp.references)] + [('*', -1)])
    :return: a pysam.AlignedSegment
    """
    def _split(_s):
        # First 11 tab-separated columns are the fixed SAM fields; the rest are tags
        qname, flag, rname, pos, \
        mapping_quality, cigarstring, \
        rnext, pnext, template_length, seq, qual, *_tg = _s.split('\t')
        flag = int(flag)
        rname = ref_dict[rname]  # dict must have '*': -1 entry too
        pos = int(pos)
        mapping_quality = int(mapping_quality)
        rnext = rname if rnext == '=' else ref_dict[rnext]
        pnext = int(pnext)
        template_length = int(template_length)
        return qname, flag, rname, pos, \
            mapping_quality, cigarstring, \
            rnext, pnext, template_length, seq, qual, _tg

    # So close, pysam.tostring, so close
    def _tags(_t):
        # BUG FIX: tags are TAG:TYPE:VALUE - split at most twice so that ':'
        # characters inside a string-typed VALUE survive (a bare split(':')
        # silently truncated such values).
        _tl = _t.split(':', 2)
        if _tl[1] == 'i':
            _tl[2] = int(_tl[2])
        elif _tl[1] == 'f':
            _tl[2] = float(_tl[2])
        return _tl[0], _tl[2], _tl[1]

    r = pysam.AlignedSegment()
    r.qname, r.flag, r.rname, r.pos, \
    r.mapping_quality, r.cigarstring, \
    r.rnext, r.pnext, r.template_length, r.seq, r.qual, tags = _split(s)
    r.set_tags([_tags(t) for t in tags])
    return r
"content_hash": "90498d9e94927427f1f945fe611b023a",
"timestamp": "",
"source": "github",
"line_count": 339,
"max_line_length": 111,
"avg_line_length": 36.36873156342183,
"alnum_prop": 0.6381701678968286,
"repo_name": "sbg/Mitty",
"id": "3e6139fe1a7c9a3042638b070028f7c3e52b865e",
"size": "12331",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mitty/analysis/bamtoolz.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "147223"
},
{
"name": "Python",
"bytes": "239645"
},
{
"name": "Shell",
"bytes": "834"
}
],
"symlink_target": ""
} |
// MITK
#include "mitkGeometryDataReaderService.h"
#include "mitkGeometry3DToXML.h"
#include "mitkIOMimeTypes.h"
#include "mitkProportionalTimeGeometryToXML.h"
// STL
#include <mitkLocaleSwitch.h>
#include <tinyxml2.h>
mitk::GeometryDataReaderService::GeometryDataReaderService()
  : AbstractFileReader(IOMimeTypes::GEOMETRY_DATA_MIMETYPE(), "MITK Geometry Data Reader")
{
  // Register this reader so the MITK IO framework can discover it
  RegisterService();
}
mitk::GeometryDataReaderService::~GeometryDataReaderService()
{
  // No resources to release here
}
// Parses the input stream as XML and reconstructs one GeometryData object per
// <GeometryData> element. Each element may contain a <Geometry3D> and/or a
// <ProportionalTimeGeometry> child; invalid children are logged and skipped.
// Throws if the XML cannot be parsed or if no object could be read at all.
std::vector<itk::SmartPointer<mitk::BaseData>> mitk::GeometryDataReaderService::DoRead()
{
  // Switch the current locale to "C"
  LocaleSwitch localeSwitch("C");
  std::vector<itk::SmartPointer<BaseData>> result;
  InputStream stream(this);
  // Read the whole stream into a string so tinyxml2 can parse it in one go
  std::string s(std::istreambuf_iterator<char>{stream.rdbuf()}, std::istreambuf_iterator<char>());
  tinyxml2::XMLDocument doc;
  doc.Parse(s.c_str(), s.size());
  if (!doc.Error())
  {
    tinyxml2::XMLHandle docHandle(&doc);
    // Iterate over every top-level <GeometryData> element
    for (auto *geomDataElement = docHandle.FirstChildElement("GeometryData").ToElement();
         geomDataElement != nullptr;
         geomDataElement = geomDataElement->NextSiblingElement())
    {
      for (auto *currentElement = geomDataElement->FirstChildElement(); currentElement != nullptr;
           currentElement = currentElement->NextSiblingElement())
      {
        // different geometries could have been serialized from a GeometryData
        // object:
        std::string tagName = currentElement->Value();
        if (tagName == "Geometry3D")
        {
          Geometry3D::Pointer restoredGeometry = Geometry3DToXML::FromXML(currentElement);
          if (restoredGeometry.IsNotNull())
          {
            GeometryData::Pointer newGeometryData = GeometryData::New();
            newGeometryData->SetGeometry(restoredGeometry);
            result.push_back(newGeometryData.GetPointer());
          }
          else
          {
            MITK_ERROR << "Invalid <Geometry3D> tag encountered. Skipping.";
          }
        }
        else if (tagName == "ProportionalTimeGeometry")
        {
          ProportionalTimeGeometry::Pointer restoredTimeGeometry =
            ProportionalTimeGeometryToXML::FromXML(currentElement);
          if (restoredTimeGeometry.IsNotNull())
          {
            GeometryData::Pointer newGeometryData = GeometryData::New();
            newGeometryData->SetTimeGeometry(restoredTimeGeometry);
            result.push_back(newGeometryData.GetPointer());
          }
          else
          {
            MITK_ERROR << "Invalid <ProportionalTimeGeometry> tag encountered. Skipping.";
          }
        }
      } // for child of <GeometryData>
    }   // for <GeometryData>
  }
  else
  {
    // XML was malformed - surface the parser's error message
    mitkThrow() << doc.ErrorStr();
  }
  if (result.empty())
  {
    mitkThrow() << "Did not read a single GeometryData object from input.";
  }
  return result;
}
// Copy constructor used by Clone(); delegates to the base-class copy constructor
mitk::GeometryDataReaderService::GeometryDataReaderService(const mitk::GeometryDataReaderService &other)
  : mitk::AbstractFileReader(other)
{
}
// Returns a new copy of this reader; caller takes ownership
mitk::GeometryDataReaderService *mitk::GeometryDataReaderService::Clone() const
{
  return new GeometryDataReaderService(*this);
}
| {
"content_hash": "19dd76b20425d003e2ea26fcebff8f1a",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 104,
"avg_line_length": 30.42718446601942,
"alnum_prop": 0.6678366305041481,
"repo_name": "MITK/MITK",
"id": "8731ae70e8e07beefa8e80c38e6b3275727f33f5",
"size": "3515",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Modules/Core/src/IO/mitkGeometryDataReaderService.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "60"
},
{
"name": "C",
"bytes": "160965"
},
{
"name": "C++",
"bytes": "29329826"
},
{
"name": "CMake",
"bytes": "997356"
},
{
"name": "CSS",
"bytes": "5894"
},
{
"name": "HTML",
"bytes": "78294"
},
{
"name": "JavaScript",
"bytes": "1044"
},
{
"name": "Makefile",
"bytes": "788"
},
{
"name": "Objective-C",
"bytes": "8783"
},
{
"name": "Python",
"bytes": "545"
},
{
"name": "SWIG",
"bytes": "28530"
},
{
"name": "Shell",
"bytes": "56972"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>livelessons</groupId>
<artifactId>livelessons-parent</artifactId>
<version>1.0.0-SNAPSHOT</version>
<packaging>pom</packaging>
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-parent</artifactId>
<version>1.0.1.RELEASE</version>
<relativePath />
</parent>
<properties>
<spring-security.version>4.0.1.RELEASE</spring-security.version>
<java.version>1.8</java.version>
<spring-security-oauth.version>2.0.6.RELEASE</spring-security-oauth.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.security.oauth</groupId>
<artifactId>spring-security-oauth2</artifactId>
<version>${spring-security-oauth.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-bom</artifactId>
<version>${spring-security.version}</version>
<scope>import</scope>
<type>pom</type>
</dependency>
</dependencies>
</dependencyManagement>
<repositories>
<repository>
<id>spring-snapshots</id>
<name>Spring Snapshots</name>
<url>http://repo.spring.io/libs-snapshot-local</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
<repository>
<id>spring-milestones</id>
<name>Spring Milestones</name>
<url>http://repo.spring.io/libs-milestone-local</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>spring-releases</id>
<name>Spring Releases</name>
<url>http://repo.spring.io/libs-release-local</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>
</project>
| {
"content_hash": "c3bbeda2c5db5ff81554e496afc373c2",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 104,
"avg_line_length": 30.9,
"alnum_prop": 0.7031900138696255,
"repo_name": "sleyzerzon/building-microservices",
"id": "9a75ef5ba2d3682251ed2496172bc3e1037edd5f",
"size": "2163",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "livelessons-parent/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6743"
},
{
"name": "Groovy",
"bytes": "110"
},
{
"name": "HTML",
"bytes": "9842"
},
{
"name": "Java",
"bytes": "119699"
},
{
"name": "JavaScript",
"bytes": "24986"
},
{
"name": "Shell",
"bytes": "104"
}
],
"symlink_target": ""
} |
# Controller concern that activates/deactivates @user via UserActivationService,
# writing audit entries and responding 204 on success. Any failure from the
# Keycloak agent is logged and mapped to a 422 response.
module UserActivation
  extend ActiveSupport::Concern

  # Activate @user; responds 204 on success, 422 on failure.
  def handle_user_activation_request
    handle_action :activate
  end

  # Deactivate @user; responds 204 on success, 422 on failure.
  def handle_user_deactivation_request
    handle_action :deactivate
  end

  private

  # Performs the given action (:activate or :deactivate) on @user under a row
  # lock. NOTE: the rescue clauses at the bottom belong to this method body -
  # their order matters since the last clause catches everything else.
  def handle_action action
    unless [:activate, :deactivate].include?(action.to_sym)
      Rails.logger.error "Action `#{action}` not supported"
      render_error 'User status change failed', :unprocessable_entity and return
    end
    case action.to_sym
    when :activate
      @user.with_lock do
        UserActivationService.activate! @user
      end
    when :deactivate
      @user.with_lock do
        UserActivationService.deactivate! @user
        # We handle Kube tokens deletion here and not in the UserActivationService
        # because this is about internal data/resource clean up and not about
        # onboarding/offboarding from services.
        if @user.kubernetes_identity
          @user.kubernetes_identity.tokens.each do |token|
            token.destroy
            AuditService.log(
              context: audit_context,
              action: 'destroy',
              auditable: token,
              data: {
                cluster: token.cluster.name,
                obfuscated_token: token.obfuscated_token
              },
              comment: "Token deleted whilst deactivating (user: #{@user.email})"
            )
          end
        end
      end
    end
    # Record the activation/deactivation itself in the audit trail
    AuditService.log(
      context: audit_context,
      action: action.to_s,
      auditable: @user
    )
    head :no_content
  rescue Agents::KeycloakAgentService::Errors::KeycloakIdentityMissing
    message = 'User keycloak identity missing'
    Rails.logger.error message
    render_error message, :unprocessable_entity and return
  rescue Agents::KeycloakAgentService::Errors::KeycloakUserRepresentationMissing
    message = 'Could not retrieve user representation from Keycloak'
    Rails.logger.error message
    render_error message, :unprocessable_entity and return
  rescue Agents::KeycloakAgentService::Errors::KeycloakUserRepresentationUpdateFailed
    message = 'Could not update user representation in Keycloak'
    Rails.logger.error message
    render_error message, :unprocessable_entity and return
  rescue Agents::KeycloakAgentService::Errors::KeycloakAccessTokenRequestFailed
    message = 'Could not obtain Keycloak auth token'
    Rails.logger.error message
    render_error message, :unprocessable_entity and return
  rescue => e
    # Catch-all for any other failure; must stay last
    Rails.logger.error "User status change failed - #{e.class}: #{e.message}"
    render_error 'User status change failed', :unprocessable_entity and return
  end
end
| {
"content_hash": "34e243902b99585d968a441e1c0e9396",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 85,
"avg_line_length": 33.41772151898734,
"alnum_prop": 0.6856060606060606,
"repo_name": "UKHomeOffice/platform-hub",
"id": "0d9e3927ff8c7b25b68ed2abb45729e72d5516b9",
"size": "2640",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "platform-hub-api/app/controllers/concerns/user_activation.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1570"
},
{
"name": "HTML",
"bytes": "322913"
},
{
"name": "JavaScript",
"bytes": "411238"
},
{
"name": "Ruby",
"bytes": "884172"
},
{
"name": "SCSS",
"bytes": "3713"
},
{
"name": "Shell",
"bytes": "7162"
}
],
"symlink_target": ""
} |
namespace PKStudio.Forms.Output
{
    /// <summary>
    /// Designer-generated half of the error list pane: a toolbar with
    /// error/warning filter toggle buttons over a details-view list.
    /// </summary>
    partial class ErrorListForm
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(ErrorListForm));
            this.toolStrip1 = new System.Windows.Forms.ToolStrip();
            this.ErrorsBtn = new System.Windows.Forms.ToolStripButton();
            this.toolStripSeparator1 = new System.Windows.Forms.ToolStripSeparator();
            this.WarningsBtn = new System.Windows.Forms.ToolStripButton();
            this.toolStripSeparator2 = new System.Windows.Forms.ToolStripSeparator();
            this.listView1 = new System.Windows.Forms.ListView();
            this.IcoCol = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
            this.NumberCol = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
            this.DescriptionCol = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
            this.FileCol = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
            this.LineCol = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
            this.imageList1 = new System.Windows.Forms.ImageList(this.components);
            this.toolStrip1.SuspendLayout();
            this.SuspendLayout();
            //
            // toolStrip1
            //
            this.toolStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
            this.ErrorsBtn,
            this.toolStripSeparator1,
            this.WarningsBtn,
            this.toolStripSeparator2});
            this.toolStrip1.Location = new System.Drawing.Point(0, 0);
            this.toolStrip1.Name = "toolStrip1";
            this.toolStrip1.Size = new System.Drawing.Size(621, 25);
            this.toolStrip1.TabIndex = 0;
            this.toolStrip1.Text = "toolStrip1";
            //
            // ErrorsBtn
            //
            this.ErrorsBtn.Checked = true;
            this.ErrorsBtn.CheckState = System.Windows.Forms.CheckState.Checked;
            this.ErrorsBtn.Image = global::PKStudio.Properties.Resources.delete;
            this.ErrorsBtn.ImageTransparentColor = System.Drawing.Color.Magenta;
            this.ErrorsBtn.Name = "ErrorsBtn";
            this.ErrorsBtn.Size = new System.Drawing.Size(66, 22);
            this.ErrorsBtn.Text = "0 Errors";
            this.ErrorsBtn.Click += new System.EventHandler(this.ErrorsBtn_Click);
            //
            // toolStripSeparator1
            //
            this.toolStripSeparator1.Name = "toolStripSeparator1";
            this.toolStripSeparator1.Size = new System.Drawing.Size(6, 25);
            //
            // WarningsBtn
            //
            this.WarningsBtn.Checked = true;
            this.WarningsBtn.CheckState = System.Windows.Forms.CheckState.Checked;
            this.WarningsBtn.Image = global::PKStudio.Properties.Resources.warning;
            this.WarningsBtn.ImageTransparentColor = System.Drawing.Color.Magenta;
            this.WarningsBtn.Name = "WarningsBtn";
            this.WarningsBtn.Size = new System.Drawing.Size(86, 22);
            this.WarningsBtn.Text = "0 Warnings";
            this.WarningsBtn.Click += new System.EventHandler(this.WarningsBtn_Click);
            //
            // toolStripSeparator2
            //
            this.toolStripSeparator2.Name = "toolStripSeparator2";
            this.toolStripSeparator2.Size = new System.Drawing.Size(6, 25);
            //
            // listView1
            //
            this.listView1.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
            this.IcoCol,
            this.NumberCol,
            this.DescriptionCol,
            this.FileCol,
            this.LineCol});
            this.listView1.Dock = System.Windows.Forms.DockStyle.Fill;
            this.listView1.FullRowSelect = true;
            this.listView1.LabelWrap = false;
            this.listView1.Location = new System.Drawing.Point(0, 25);
            this.listView1.MultiSelect = false;
            this.listView1.Name = "listView1";
            this.listView1.ShowGroups = false;
            this.listView1.Size = new System.Drawing.Size(621, 366);
            this.listView1.SmallImageList = this.imageList1;
            this.listView1.TabIndex = 1;
            this.listView1.UseCompatibleStateImageBehavior = false;
            this.listView1.View = System.Windows.Forms.View.Details;
            this.listView1.SizeChanged += new System.EventHandler(this.listView1_SizeChanged);
            this.listView1.MouseDoubleClick += new System.Windows.Forms.MouseEventHandler(this.listView1_MouseDoubleClick);
            //
            // IcoCol
            //
            this.IcoCol.Text = "";
            this.IcoCol.Width = 20;
            //
            // NumberCol
            //
            this.NumberCol.Text = "";
            this.NumberCol.Width = 20;
            //
            // DescriptionCol
            //
            this.DescriptionCol.Text = "Description";
            this.DescriptionCol.Width = 368;
            //
            // FileCol
            //
            this.FileCol.Text = "File";
            this.FileCol.Width = 100;
            //
            // LineCol
            //
            this.LineCol.Text = "Line";
            this.LineCol.Width = 35;
            //
            // imageList1
            //
            this.imageList1.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("imageList1.ImageStream")));
            this.imageList1.TransparentColor = System.Drawing.Color.Transparent;
            this.imageList1.Images.SetKeyName(0, "delete.png");
            this.imageList1.Images.SetKeyName(1, "warning.png");
            //
            // ErrorListForm
            //
            this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.ClientSize = new System.Drawing.Size(621, 391);
            this.Controls.Add(this.listView1);
            this.Controls.Add(this.toolStrip1);
            this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
            this.Name = "ErrorListForm";
            this.Text = "Error List";
            this.toolStrip1.ResumeLayout(false);
            this.toolStrip1.PerformLayout();
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        // Controls instantiated by InitializeComponent; event handlers
        // (ErrorsBtn_Click, WarningsBtn_Click, listView1_*) live in the
        // other half of this partial class.
        private System.Windows.Forms.ToolStrip toolStrip1;
        private System.Windows.Forms.ToolStripButton ErrorsBtn;
        private System.Windows.Forms.ToolStripSeparator toolStripSeparator1;
        private System.Windows.Forms.ToolStripButton WarningsBtn;
        private System.Windows.Forms.ToolStripSeparator toolStripSeparator2;
        private System.Windows.Forms.ListView listView1;
        private System.Windows.Forms.ColumnHeader IcoCol;
        private System.Windows.Forms.ColumnHeader NumberCol;
        private System.Windows.Forms.ColumnHeader DescriptionCol;
        private System.Windows.Forms.ColumnHeader FileCol;
        private System.Windows.Forms.ColumnHeader LineCol;
        private System.Windows.Forms.ImageList imageList1;
    }
} | {
"content_hash": "27a7665b6e795b5cea42b7972d294369",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 145,
"avg_line_length": 45.9,
"alnum_prop": 0.6017913338174776,
"repo_name": "AlexandrSurkov/PKStudio",
"id": "06d62501f8033b93e1e33388f416213501f5cc1f",
"size": "8264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PKStudio/Forms/Output/ErrorListForm.Designer.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "2266338"
},
{
"name": "HTML",
"bytes": "7288"
}
],
"symlink_target": ""
} |
package com.amazonaws.services.dynamodbv2.document;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import com.amazonaws.services.dynamodbv2.document.internal.InternalUtils;
/**
* Used to represent a primary key that has one or multiple key components.
*/
public class PrimaryKey {
    // LinkedHashMap preserves the insertion order of the key components
    private final Map<String, KeyAttribute> components = new LinkedHashMap<String, KeyAttribute>();

    /**
     * Constructs an empty primary key; components can be added later via
     * {@link #addComponent(String, Object)} or {@link #addComponents(KeyAttribute...)}.
     */
    public PrimaryKey() {}

    /**
     * Constructs with the specified key components.
     *
     * @param components the key components; null elements are rejected
     */
    public PrimaryKey(KeyAttribute ...components) {
        addComponents(components);
    }

    /**
     * Constructs with a hash key.
     *
     * @param hashKeyName name of the hash key attribute
     * @param hashKeyValue value of the hash key attribute
     */
    public PrimaryKey(String hashKeyName, Object hashKeyValue) {
        addComponent(hashKeyName, hashKeyValue);
    }

    /**
     * Constructs with a hash key and a range key.
     *
     * @param hashKeyName name of the hash key attribute
     * @param hashKeyValue value of the hash key attribute
     * @param rangeKeyName name of the range key attribute; must differ from hashKeyName
     * @param rangeKeyValue value of the range key attribute
     * @throws IllegalArgumentException if the hash and range key names are equal
     */
    public PrimaryKey(String hashKeyName, Object hashKeyValue,
                      String rangeKeyName, Object rangeKeyValue) {
        if (hashKeyName.equals(rangeKeyName)) {
            // Fixed typo in the message (was "hashKyeName")
            throw new IllegalArgumentException(
                "hashKeyName must not be the same as the rangeKeyName");
        }
        addComponent(hashKeyName, hashKeyValue);
        addComponent(rangeKeyName, rangeKeyValue);
    }

    /**
     * Returns all the key components of this primary key.
     */
    public Collection<KeyAttribute> getComponents() {
        return components.values();
    }

    /**
     * Returns all the key component names of this primary key as a set.
     */
    public Set<String> getComponentNameSet() {
        return components.keySet();
    }

    /**
     * Returns true if this primary key has the specified key attribute name;
     * false otherwise.
     */
    public boolean hasComponent(String attrName) {
        return components.containsKey(attrName);
    }

    /**
     * Add one or multiple key components to this primary key.
     *
     * Note adding a key component with the same name as that of an existing
     * one would overwrite and become a single key component instead of two.
     */
    public PrimaryKey addComponents(KeyAttribute ... components) {
        if (components != null) {
            for (KeyAttribute ka: components) {
                InternalUtils.rejectNullInput(ka);
                this.components.put(ka.getName(), ka);
            }
        }
        return this;
    }

    /**
     * Add a key component to this primary key.
     *
     * Note adding a key component with the same name as that of an existing
     * one would overwrite and become a single key component instead of two.
     */
    public PrimaryKey addComponent(String keyAttributeName, Object keyAttributeValue) {
        components.put(keyAttributeName,
            new KeyAttribute(keyAttributeName, keyAttributeValue));
        return this;
    }

    @Override
    public String toString() {
        return String.valueOf(components);
    }

    @Override
    public int hashCode() {
        return components.hashCode();
    }

    @Override
    public boolean equals(Object in) {
        if (in instanceof PrimaryKey) {
            PrimaryKey that = (PrimaryKey)in;
            return this.components.equals(that.components);
        } else {
            return false;
        }
    }
}
| {
"content_hash": "a55ccf1b4bf9bbf2dd9a76775be7062b",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 103,
"avg_line_length": 28.884955752212388,
"alnum_prop": 0.6446078431372549,
"repo_name": "trasa/aws-sdk-java",
"id": "c8acfba5392aeece9083f7c851bb16f45a870937",
"size": "3848",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/document/PrimaryKey.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "100011199"
},
{
"name": "Scilab",
"bytes": "2354"
}
],
"symlink_target": ""
} |
package org.wso2.carbon.identity.oauth2.model;
import org.wso2.carbon.identity.oauth2.util.OAuth2Util;
import java.io.Serializable;
import java.util.LinkedHashSet;
import java.util.Set;
/**
* A Bean class which is used to store the OAuth parameters available in a OAuth request in the Cache.
*/
public class OAuth2Parameters implements Serializable {

    // Serializable so instances can be stored in the cache (see class javadoc)
    private static final long serialVersionUID = -8719088680725780804L;

    // NOTE(review): string fields are sanitized with OAuth2Util.getSafeText in
    // their setters; scopes and acrValues are stored as-is.
    private String applicationName;
    private String redirectURI;
    private Set<String> scopes;
    private String state;
    private String responseType;
    private String clientId;
    private String nonce;
    private String display;
    private String prompt;
    private String id_token_hint;
    private String login_hint;
    // NOTE(review): raw type; elements are presumably Strings - confirm with callers
    private LinkedHashSet acrValues;

    /**
     * @return the application name
     */
    public String getApplicationName() {
        return applicationName;
    }

    /**
     * @param applicationName the application name to set (sanitized before storing)
     */
    public void setApplicationName(String applicationName) {
        this.applicationName = OAuth2Util.getSafeText(applicationName);
    }

    /**
     * @return the redirect URI
     */
    public String getRedirectURI() {
        return redirectURI;
    }

    /**
     * @param redirectURI the redirect URI to set (sanitized before storing)
     */
    public void setRedirectURI(String redirectURI) {
        this.redirectURI = OAuth2Util.getSafeText(redirectURI);
    }

    /**
     * @return the requested scopes
     */
    public Set<String> getScopes() {
        return scopes;
    }

    /**
     * @param scopes the requested scopes to set (stored without sanitization)
     */
    public void setScopes(Set<String> scopes) {
        this.scopes = scopes;
    }

    /**
     * @return the state parameter
     */
    public String getState() {
        return state;
    }

    /**
     * @param state the state parameter to set (sanitized before storing)
     */
    public void setState(String state) {
        this.state = OAuth2Util.getSafeText(state);
    }

    /**
     * @return the response type
     */
    public String getResponseType() {
        return responseType;
    }

    /**
     * @param responseType the response type to set (sanitized before storing)
     */
    public void setResponseType(String responseType) {
        this.responseType = OAuth2Util.getSafeText(responseType);
    }

    /**
     * @return the client id
     */
    public String getClientId() {
        return clientId;
    }

    /**
     * @param clientId the client id to set (sanitized before storing)
     */
    public void setClientId(String clientId) {
        this.clientId = OAuth2Util.getSafeText(clientId);
    }

    /**
     * @return the nonce
     */
    public String getNonce() {
        return nonce;
    }

    /**
     * @param nonce the nonce to set
     */
    public void setNonce(String nonce) {
        this.nonce = OAuth2Util.getSafeText(nonce);
    }

    /**
     * @return the display
     */
    public String getDisplay() {
        return display;
    }

    /**
     * @param display the display to set
     */
    public void setDisplay(String display) {
        this.display = OAuth2Util.getSafeText(display);
    }

    /**
     * @return the prompt
     */
    public String getPrompt() {
        return prompt;
    }

    /**
     * @param prompt the prompt to set
     */
    public void setPrompt(String prompt) {
        this.prompt = OAuth2Util.getSafeText(prompt);
    }

    /**
     * @return the id_token_hint
     */
    public String getIDTokenHint() {
        return id_token_hint;
    }

    /**
     * @param id_token_hint the id_token_hint to set
     */
    public void setIDTokenHint(String id_token_hint) {
        this.id_token_hint = OAuth2Util.getSafeText(id_token_hint);
    }

    /**
     * @return the login_hint
     */
    public String getLoginHint() {
        return login_hint;
    }

    /**
     * @param login_hint the login_hint to set
     */
    public void setLoginHint(String login_hint) {
        this.login_hint = OAuth2Util.getSafeText(login_hint);
    }

    /**
     * @return the acr values (stored without sanitization)
     */
    public LinkedHashSet getACRValues() {
        return acrValues;
    }

    /**
     * @param acrValues the acr values to set
     */
    public void setACRValues(LinkedHashSet acrValues) {
        this.acrValues = acrValues;
    }
}
| {
"content_hash": "dd09f04342d78e5970e1d9f6aa4bfe1c",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 102,
"avg_line_length": 22.403846153846153,
"alnum_prop": 0.6343347639484979,
"repo_name": "prabathabey/carbon-identity",
"id": "3de51a8ca7ff2428e1c45ed5e777bb4789312de9",
"size": "4164",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "components/oauth/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth2/model/OAuth2Parameters.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "104735"
},
{
"name": "HTML",
"bytes": "115445"
},
{
"name": "Java",
"bytes": "11953766"
},
{
"name": "JavaScript",
"bytes": "429004"
},
{
"name": "Objective-C",
"bytes": "13608"
},
{
"name": "PLSQL",
"bytes": "54963"
},
{
"name": "Thrift",
"bytes": "338"
},
{
"name": "XSLT",
"bytes": "1030"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Phyton, Horn 17(1-2): 70 (1975)
#### Original name
Ahlesia lichenicola Fuckel
### Remarks
null | {
"content_hash": "19f766dda491ded0363993034c9543ad",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 14.076923076923077,
"alnum_prop": 0.7049180327868853,
"repo_name": "mdoering/backbone",
"id": "f6e13a8b3ea2d90e3ea089e66819a14ca025c181",
"size": "257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Leotiomycetes/Thelocarpaceae/Thelocarpon/Thelocarpon lichenicola/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_11) on Mon Mar 17 10:51:58 PDT 2014 -->
<title>Uses of Class com.box.boxjavalibv2.requests.DeleteEmailAliasRequest</title>
<meta name="date" content="2014-03-17">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class com.box.boxjavalibv2.requests.DeleteEmailAliasRequest";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../com/box/boxjavalibv2/requests/DeleteEmailAliasRequest.html" title="class in com.box.boxjavalibv2.requests">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/box/boxjavalibv2/requests/class-use/DeleteEmailAliasRequest.html" target="_top">Frames</a></li>
<li><a href="DeleteEmailAliasRequest.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class com.box.boxjavalibv2.requests.DeleteEmailAliasRequest" class="title">Uses of Class<br>com.box.boxjavalibv2.requests.DeleteEmailAliasRequest</h2>
</div>
<div class="classUseContainer">No usage of com.box.boxjavalibv2.requests.DeleteEmailAliasRequest</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../com/box/boxjavalibv2/requests/DeleteEmailAliasRequest.html" title="class in com.box.boxjavalibv2.requests">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/box/boxjavalibv2/requests/class-use/DeleteEmailAliasRequest.html" target="_top">Frames</a></li>
<li><a href="DeleteEmailAliasRequest.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"content_hash": "c0c33b395e6ba9b0757848da485bc9ed",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 169,
"avg_line_length": 37.63478260869565,
"alnum_prop": 0.6284658040665434,
"repo_name": "shelsonjava/box-java-sdk-v2",
"id": "7d3c2b588e34ba2b464660e97992ccc4b12ab308",
"size": "4328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "javadoc/com/box/boxjavalibv2/requests/class-use/DeleteEmailAliasRequest.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "737981"
},
{
"name": "Shell",
"bytes": "1413"
}
],
"symlink_target": ""
} |
# Register the current Rubinius platform (cpu-rubinius-major.minor) with
# RubyGems so gems published with a Rubinius-specific platform can be resolved.
cpu = Gem::Platform.local.cpu
version = Rubinius::VERSION.split('.')[0, 2].join '.'
Gem.platforms << Gem::Platform.new([cpu, 'rubinius', version])
# Rubinius-specific RubyGems defaults. Reopens the Gem module to point gem
# directories, the gem search path, and the executable name format at
# Rubinius locations.
module Gem
  # Directory where gem executables are installed.
  def self.default_bindir
    File.join Rubinius::GEMS_PATH, "bin"
  end

  # Primary gem installation directory, keyed by the Ruby ABI version.
  def self.default_dir
    File.join Rubinius::GEMS_PATH, Gem::ConfigMap[:ruby_version]
  end

  # Location of gems shipped preinstalled with Rubinius.
  def self.default_preinstalled_dir
    File.join Rubinius::GEMS_PATH, "rubinius", "preinstalled"
  end

  # Gem search path: the default dir, preceded by the per-user gem dir
  # when the user's home directory exists.
  def self.default_path
    dirs = [default_dir]
    # This is the same test rubygems/defaults.rb uses.
    # File.exist? replaces the deprecated File.exists? alias.
    dirs.unshift(Gem.user_dir) if File.exist?(Gem.user_home)
    dirs
  end

  # Format string used to name installed gem executables; must contain %s.
  # Falls back to plain '%s' when the install name cannot be determined.
  def self.default_exec_format
    exec_format = ConfigMap[:ruby_install_name].sub('rbx', '%s') rescue '%s'

    unless exec_format =~ /%s/ then
      raise Gem::Exception,
            "[BUG] invalid exec_format #{exec_format.inspect}, no %s"
    end

    exec_format
  end
end
| {
"content_hash": "cf5276c6a1fdf82d391a4e3ec10faf7c",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 76,
"avg_line_length": 24.054054054054053,
"alnum_prop": 0.6651685393258427,
"repo_name": "travis-repos/rubinius",
"id": "61425c646781b9c8bbd533dcd9ca260c0889ccec",
"size": "939",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/rubygems/defaults/rbx.rb",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2541839"
},
{
"name": "C++",
"bytes": "4643592"
},
{
"name": "JavaScript",
"bytes": "144194"
},
{
"name": "Perl",
"bytes": "256305"
},
{
"name": "Python",
"bytes": "21905"
},
{
"name": "Ruby",
"bytes": "13602665"
},
{
"name": "Scheme",
"bytes": "557"
},
{
"name": "Shell",
"bytes": "64303"
},
{
"name": "Vim script",
"bytes": "671"
}
],
"symlink_target": ""
} |
package com.dqqdo.dochart.util;
import android.content.Context;
import android.view.WindowManager;
/**
* 作者:duqingquan
* 时间:2016/12/23:13:47
* 邮箱:
* 说明:
*/
/** Holder for cached screen dimensions, populated once via initScreenInfo. */
public class ViewUtil {
// Screen width in pixels; 0 until initScreenInfo has been called.
public static int screenWidth;
// Screen height in pixels; 0 until initScreenInfo has been called.
public static int screenHeight;
/**
 * Caches the default display's width and height into the static fields.
 * NOTE(review): Display.getWidth()/getHeight() are deprecated on newer
 * Android APIs — consider Display.getSize(Point) or DisplayMetrics; verify
 * the project's minimum API level before changing.
 */
public static void initScreenInfo(Context context) {
WindowManager wm = (WindowManager) context
.getSystemService(Context.WINDOW_SERVICE);
screenWidth = wm.getDefaultDisplay().getWidth();
screenHeight = wm.getDefaultDisplay().getHeight();
}
}
| {
"content_hash": "b4c5ba53b84fc6321ed9e30e7352bd51",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 58,
"avg_line_length": 20.555555555555557,
"alnum_prop": 0.6810810810810811,
"repo_name": "zmobs/DoChart",
"id": "ba02ca9bf11b775e7e4539585ee071e5b0295766",
"size": "579",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dochart/src/main/java/com/dqqdo/dochart/util/ViewUtil.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Groovy",
"bytes": "740"
},
{
"name": "Java",
"bytes": "259890"
},
{
"name": "PureBasic",
"bytes": "310"
},
{
"name": "Stata",
"bytes": "82"
}
],
"symlink_target": ""
} |
package mcjty.rftools.items.dimlets;
/**
* Created by jorrit on 8/12/14.
*/
/**
 * Immutable identifier for a dimlet: a dimlet type paired with a name.
 * Value-based equals/hashCode make it suitable for use as a map key; the
 * string form produced by toString() can be parsed back via parseKey().
 */
public class DimletKey {
    private final DimletType type;
    private final String name;

    public DimletKey(DimletType type, String name) {
        this.type = type;
        this.name = name;
    }

    /** The dimlet's type component. */
    public DimletType getType() {
        return type;
    }

    /** The dimlet's name component. */
    public String getName() {
        return name;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        DimletKey that = (DimletKey) o;
        return type == that.type && name.equals(that.name);
    }

    @Override
    public int hashCode() {
        // Conventional 31-based accumulation over (type, name).
        return 31 * type.hashCode() + name.hashCode();
    }

    @Override
    public String toString() {
        // Serialized form: one-character type opcode followed by the name.
        return type.dimletType.getOpcode() + name;
    }

    /** Inverse of toString(): first character is the type opcode, the rest is the name. */
    public static DimletKey parseKey(String skey) {
        String opcode = skey.substring(0, 1);
        String rest = skey.substring(1);
        return new DimletKey(DimletType.getTypeByOpcode(opcode), rest);
    }
}
| {
"content_hash": "182185ebd10d129e5d702b86d9a5b751",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 71,
"avg_line_length": 21.540983606557376,
"alnum_prop": 0.5509893455098934,
"repo_name": "Elecs-Mods/RFTools",
"id": "023bc4b6929ce53110ef10af46199b1cb1621f27",
"size": "1314",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/main/java/mcjty/rftools/items/dimlets/DimletKey.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "2936438"
}
],
"symlink_target": ""
} |
<?php
// Search form partial for Operation records (Yii2 view).
// Submits filter values via GET back to the index action, which applies
// them through the OperationSearch model.
use yii\helpers\Html;
use yii\widgets\ActiveForm;
/* @var $this yii\web\View */
/* @var $model backend\models\OperationSearch */
/* @var $form yii\widgets\ActiveForm */
?>
<div class="operation-search box-padding">
<?php $form = ActiveForm::begin([
'action' => ['index'],
'method' => 'get',
]); ?>
<?= $form->field($model, '_id') ?>
<?= $form->field($model, 'uuid') ?>
<?= $form->field($model, 'taskStageUuid') ?>
<?= $form->field($model, 'operationVerdictUuid') ?>
<?= $form->field($model, 'operationStatusUuid') ?>
<?php // echo $form->field($model, 'operationTemplateUuid') ?>
<?php // echo $form->field($model, 'startDate') ?>
<?php // echo $form->field($model, 'endDate') ?>
<?php // echo $form->field($model, 'flowOrder') ?>
<?php // echo $form->field($model, 'createdAt') ?>
<?php // echo $form->field($model, 'changedAt') ?>
<div class="form-group">
<?= Html::submitButton(Yii::t('app', 'Search'), ['class' => 'btn btn-primary']) ?>
<?= Html::resetButton(Yii::t('app', 'Reset'), ['class' => 'btn btn-default']) ?>
</div>
<?php ActiveForm::end(); ?>
</div>
| {
"content_hash": "1366b5c5197b4dfbaaae960c42a0e5e6",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 90,
"avg_line_length": 25.19148936170213,
"alnum_prop": 0.5464527027027027,
"repo_name": "mikaelwasp/toir-server",
"id": "a88a437e885411996d6a73457c160659b1e7f3d6",
"size": "1184",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "backend/views/operation/_search.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1546"
},
{
"name": "CSS",
"bytes": "28188"
},
{
"name": "JavaScript",
"bytes": "15974"
},
{
"name": "PHP",
"bytes": "1000304"
},
{
"name": "Shell",
"bytes": "3256"
}
],
"symlink_target": ""
} |
FOUNDATION_EXPORT double AudioBotVersionNumber;
//! Project version string for AudioBot.
FOUNDATION_EXPORT const unsigned char AudioBotVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <AudioBot/PublicHeader.h>
| {
"content_hash": "b1a33501fb256dc7c2ee64d6b69df3c3",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 133,
"avg_line_length": 36.25,
"alnum_prop": 0.8103448275862069,
"repo_name": "nixzhu/AudioBot",
"id": "a9bc42486f79edea8e834826b4eff60aac5d2eea",
"size": "481",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "AudioBot/AudioBot.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "481"
},
{
"name": "Ruby",
"bytes": "823"
},
{
"name": "Swift",
"bytes": "42401"
}
],
"symlink_target": ""
} |
package org.camunda.bpm.engine.test.api.mgmt;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.inverted;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.jobByPriority;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.verifySortingAndCount;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.camunda.bpm.engine.runtime.Job;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.test.Deployment;
import org.camunda.bpm.engine.test.util.PluggableProcessEngineTest;
import org.camunda.bpm.engine.variable.Variables;
import org.junit.Test;
/**
 * Tests for querying jobs by priority: ordering, filtering with
 * priorityLowerThanOrEquals / priorityHigherThanOrEquals, and combinations
 * of both bounds.
 *
 * @author Thorben Lindhauer
 */
public class JobQueryByPriorityTest extends PluggableProcessEngineTest {

  /**
   * Starts five instances of "jobPrioExpressionProcess" whose job priority
   * expression resolves to the values 0 through 4 (one per instance, in order).
   *
   * @return the started instances, index i having priority i
   */
  protected List<ProcessInstance> startInstancesWithPriorities0To4() {
    List<ProcessInstance> instances = new ArrayList<ProcessInstance>();
    for (int i = 0; i < 5; i++) {
      instances.add(runtimeService.startProcessInstanceByKey("jobPrioExpressionProcess",
          Variables.createVariables().putValue("priority", i)));
    }
    return instances;
  }

  @Deployment(resources = "org/camunda/bpm/engine/test/api/mgmt/jobPrioExpressionProcess.bpmn20.xml")
  @Test
  public void testOrderByPriority() {
    // given five jobs with priorities 0 through 4
    startInstancesWithPriorities0To4();

    // then querying and ordering by priority works in both directions
    verifySortingAndCount(managementService.createJobQuery().orderByJobPriority().asc(), 5, jobByPriority());
    verifySortingAndCount(managementService.createJobQuery().orderByJobPriority().desc(), 5, inverted(jobByPriority()));
  }

  @Deployment(resources = "org/camunda/bpm/engine/test/api/mgmt/jobPrioExpressionProcess.bpmn20.xml")
  @Test
  public void testFilterByJobPriorityLowerThanOrEquals() {
    // given five jobs with priorities 0 through 4
    List<ProcessInstance> instances = startInstancesWithPriorities0To4();

    // when filtering by priority <= 2
    List<Job> jobs = managementService.createJobQuery().priorityLowerThanOrEquals(2).list();

    // then exactly the three lowest-priority jobs are returned
    assertEquals(3, jobs.size());

    Set<String> processInstanceIds = new HashSet<String>();
    processInstanceIds.add(instances.get(0).getId());
    processInstanceIds.add(instances.get(1).getId());
    processInstanceIds.add(instances.get(2).getId());

    for (Job job : jobs) {
      assertTrue(job.getPriority() <= 2);
      assertTrue(processInstanceIds.contains(job.getProcessInstanceId()));
    }
  }

  @Deployment(resources = "org/camunda/bpm/engine/test/api/mgmt/jobPrioExpressionProcess.bpmn20.xml")
  @Test
  public void testFilterByJobPriorityLowerThanOrEqualsAndHigherThanOrEqual() {
    // given five jobs with priorities 0 through 4
    startInstancesWithPriorities0To4();

    // when combining contradictory bounds (priority <= 2 AND priority >= 3)
    // then no jobs are returned
    assertEquals(0, managementService.createJobQuery().priorityLowerThanOrEquals(2).priorityHigherThanOrEquals(3).count());
  }

  @Deployment(resources = "org/camunda/bpm/engine/test/api/mgmt/jobPrioExpressionProcess.bpmn20.xml")
  @Test
  public void testFilterByJobPriorityHigherThanOrEquals() {
    // given five jobs with priorities 0 through 4
    List<ProcessInstance> instances = startInstancesWithPriorities0To4();

    // when filtering by priority >= 2
    List<Job> jobs = managementService.createJobQuery().priorityHigherThanOrEquals(2L).list();

    // then exactly the three highest-priority jobs are returned
    assertEquals(3, jobs.size());

    Set<String> processInstanceIds = new HashSet<String>();
    processInstanceIds.add(instances.get(2).getId());
    processInstanceIds.add(instances.get(3).getId());
    processInstanceIds.add(instances.get(4).getId());

    for (Job job : jobs) {
      assertTrue(job.getPriority() >= 2);
      assertTrue(processInstanceIds.contains(job.getProcessInstanceId()));
    }
  }

  @Deployment(resources = "org/camunda/bpm/engine/test/api/mgmt/jobPrioExpressionProcess.bpmn20.xml")
  @Test
  public void testFilterByJobPriorityLowerAndHigher() {
    // given five jobs with priorities 0 through 4
    List<ProcessInstance> instances = startInstancesWithPriorities0To4();

    // when bounding the priority to exactly 2 from both sides
    Job job = managementService.createJobQuery().priorityHigherThanOrEquals(2L)
        .priorityLowerThanOrEquals(2L).singleResult();

    // then the single matching job (priority 2, third instance) is returned
    assertNotNull(job);
    assertEquals(2, job.getPriority());
    assertEquals(instances.get(2).getId(), job.getProcessInstanceId());
  }
}
| {
"content_hash": "9f37227e8efed9caa4216c53c1bcd14c",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 123,
"avg_line_length": 41.09701492537314,
"alnum_prop": 0.7381514436172144,
"repo_name": "ingorichtsmeier/camunda-bpm-platform",
"id": "592fbe449de5fea44f292ae1f899951a9682f35a",
"size": "6314",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "engine/src/test/java/org/camunda/bpm/engine/test/api/mgmt/JobQueryByPriorityTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8608"
},
{
"name": "CSS",
"bytes": "5486"
},
{
"name": "Fluent",
"bytes": "3111"
},
{
"name": "FreeMarker",
"bytes": "1442812"
},
{
"name": "Groovy",
"bytes": "1904"
},
{
"name": "HTML",
"bytes": "961289"
},
{
"name": "Java",
"bytes": "44047866"
},
{
"name": "JavaScript",
"bytes": "3063613"
},
{
"name": "Less",
"bytes": "154956"
},
{
"name": "Python",
"bytes": "192"
},
{
"name": "Ruby",
"bytes": "60"
},
{
"name": "SQLPL",
"bytes": "44180"
},
{
"name": "Shell",
"bytes": "11634"
}
],
"symlink_target": ""
} |
// Assembly metadata for the Sweet.Jayson.ConsoleTest console test harness.
using System.Reflection;
using System.Runtime.CompilerServices;
// Information about this assembly is defined by the following attributes.
// Change them to the values specific to your project.
[assembly: AssemblyTitle ("Sweet.Jayson.ConsoleTest")]
[assembly: AssemblyDescription ("")]
[assembly: AssemblyConfiguration ("")]
[assembly: AssemblyCompany ("")]
[assembly: AssemblyProduct ("")]
[assembly: AssemblyCopyright ("CAAN")]
[assembly: AssemblyTrademark ("")]
[assembly: AssemblyCulture ("")]
// The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}".
// The form "{Major}.{Minor}.*" will automatically update the build and revision,
// and "{Major}.{Minor}.{Build}.*" will update just the revision.
// NOTE(review): the wildcard makes builds non-deterministic; pin a full
// version if reproducible builds are required.
[assembly: AssemblyVersion ("1.0.*")]
// The following attributes are used to specify the signing key for the assembly,
// if desired. See the Mono documentation for more information about signing.
//[assembly: AssemblyDelaySign(false)]
//[assembly: AssemblyKeyFile("")]
| {
"content_hash": "15eff702488d748149dfa4e7e7435d25",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 81,
"avg_line_length": 38.111111111111114,
"alnum_prop": 0.7191448007774538,
"repo_name": "ocdogan/Sweet.Jayson",
"id": "97eb084fd1a2d7913ebb5cecf3641bfde9ee55ed",
"size": "1031",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Sweet.Jayson.ConsoleTest/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "1333685"
}
],
"symlink_target": ""
} |
<template name="block_edit">
{{! Admin view for creating a new content block or editing an existing one.
The addBlock helper switches between the "add" chooser and the edit form. }}
<div class="block-edit-container">
{{#if addBlock}}
<h1>Add block</h1>
{{! Option 1: create a brand-new block from a block-type template. }}
<div class="azimuth-form-section">
<h6>New block</h6>
<p>Create a new block</p>
<select class="block-template-selector" placeholder="Select A Block Type">
<option>Select A Block Type</option>
{{#each templates}}
<option value="{{name}}">{{label}}</option>
{{/each}}
</select>
</div>
{{! Option 2: add all existing blocks carrying a chosen tag. }}
<div class="azimuth-form-section">
<h6>By Tag</h6>
<p>Add a set of blocks with a particular tag</p>
<select class="block-tag-selector" placeholder="Select A Tag">
<option>Select A Tag</option>
{{#each allTags}}
<option value="{{tag}}">{{tag}}</option>
{{/each}}
</select>
</div>
{{! Option 3: add all existing blocks of a chosen block type. }}
<div class="azimuth-form-section">
<h6>By Type</h6>
<p>Add all existing blocks of a particular type</p>
<select class="block-type-selector" placeholder="Select A Block Type">
<option>Select A Block Type</option>
{{#each templates}}
<option value="{{name}}">{{label}}</option>
{{/each}}
</select>
</div>
{{else}}
{{! Edit mode: the delete link is only shown for a persisted block (blockId set). }}
<div>
<h1>Edit Block {{#if blockId}}<a class="azimuth-button alert delete-block micro right">Delete Block</a>{{/if}}</h1>
</div>
{{/if}}
{{#if blockFields}}
<!-- Display block edit form -->
{{! Each field is rendered via the shared formHelper template; tags get their own widget. }}
<form class="block-edit-form">
{{> tag tags=currentBlockTags}}
{{#each blockFields}}
{{> formHelper value=this.value type=this.type label=this.label fieldName=this.name }}
{{/each}}
<div class="azimuth-buttons clear">
<a class="azimuth-button text left cancel">Cancel</a>
<a class="azimuth-button right submit"><i class="icon icon-checkmark"></i>Save</a>
</div>
</form>
{{/if}}
</div>
{{! Confirmation modal shown before an (irreversible) block deletion. }}
<div class="azimuth-modal-container" id="deleteBlockModal">
<div class="azimuth-modal">
<div class="modal-header">
<h3>Delete Block</h3>
</div>
<div class="modal-body">
<p>Are you sure you want to delete this block? This action cannot be undone.</p>
</div>
<div class="azimuth-buttons clear">
<a class="azimuth-button text left cancel close">Cancel</a>
<a class="azimuth-button right delete-block-confirm alert"><i class="icon icon-cross"></i>Delete</a>
</div>
</div>
<div class="azimuth-modal-bg close"></div>
</div>
</template>
| {
"content_hash": "e0e89086cf6abf249f709803d6a93461",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 123,
"avg_line_length": 33.33766233766234,
"alnum_prop": 0.5578496299181924,
"repo_name": "ndemoreau/azimuth-core",
"id": "21e263e839301058e1ec8967f87262f5818ad895",
"size": "2567",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "client/views/admin/blocks/block_edit.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23176"
},
{
"name": "HTML",
"bytes": "25349"
},
{
"name": "JavaScript",
"bytes": "106233"
}
],
"symlink_target": ""
} |
name: Feature request
about: Help us develop our roadmap and direction.
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
| {
"content_hash": "e6f432f859c0b12cfead44a5450b3da8",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 92,
"avg_line_length": 37.666666666666664,
"alnum_prop": 0.7769911504424779,
"repo_name": "pingcap/grpc-rs",
"id": "8a84863659c8b5a371152522e3000edfbe99ee05",
"size": "569",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": ".github/ISSUE_TEMPLATE/feature_request.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "256"
},
{
"name": "C++",
"bytes": "30905"
},
{
"name": "Rust",
"bytes": "586082"
},
{
"name": "Shell",
"bytes": "424"
}
],
"symlink_target": ""
} |
<meta name="google-site-verification" content="tZgbB2s-hTI0IePQQRCjHqL_Vf0j_XJmehXAHJerrn4" />
# Microsoft Azure PowerShell
This repository contains a set of PowerShell cmdlets for developers and administrators to develop, deploy and manage Microsoft Azure applications.
* For documentation on how to build and deploy applications to Microsoft Azure please see the [Microsoft Azure Documentation Center](http://azure.microsoft.com/en-us/documentation/).
* For comprehensive documentation on the developer cmdlets see [How to install and configure Azure PowerShell](http://azure.microsoft.com/en-us/documentation/articles/install-configure-powershell/).
* For comprehensive documentation on the full set of Microsoft Azure cmdlets see [Microsoft Azure Management Center](http://go.microsoft.com/fwlink/?linkID=254459&clcid=0x409).
## Features
* Account
* Get and import Azure publish settings
* Login with Microsoft account or Organizational account through Microsoft Azure Active Directory
* Environment
* Get the different out-of-box Microsoft Azure environments
* Add/Set/Remove customized environments (like your Windows Azure Pack environments)
* Get Azure publish settings for a particular environment
* Subscription
* Manage Azure subscription
* Manage AffinityGroup
* Website
* Manage website, such as CRUD, start and stop.
* Manage slot
* Manage WebJob
* Deploy project via WebDeploy
* Diagnostics
* Configure site and application diagnostics
* Log streaming
* Save log
* Cloud Services
* Create scaffolding for cloud service and role. Role support includes Node.js and PHP.
* Manage cloud service and role, such as CRUD, start and stop.
* Manage extension.
* Start/Stop Azure emulator.
* Manage certificate.
* Manage cloud service extensions
* Remote desktop
* Diagnostics
* Microsoft Antimalware
* Windows Azure Diagnostics
* Storage
* Manage storage account and access key.
* Manage storage container and blob, with paging.
* Copy storage blob.
* Manage storage table.
* Manage storage queue.
* Create SAS token.
* Manage metrics and logging.
* Configure timeout
* SQL Database
* CRUD support for database server, database and firewall rule.
* Get database server quota.
* Get/Set database server service objective.
* Manage database copies and active geo-replication.
* Get dropped databases that can be restored.
* Issue requests to restore a live or dropped database to a point in time.
* Issue requests to recover a database from an unavailable database server.
* Manage database and database server auditing policy.
* Service Bus
* Manage service bus namespaces.
* VM
* Manage VM, such as CRUD, import/export and start/stop/restart.
* Manage VM image and VM image disks.
* Manage disk, such as CRUD.
* Manage VM endpoint, such as CRUD and ACL.
* Get/Set VM sub net.
* Manage certificate and SSH key.
* PowerShell remoting
* Manage extension
* BG Info
* Chef
* Puppet
* Custom Script
* Access
* Microsoft Antimalware
* PowerShell DSC
* Windows Azure Diagnostics
* Public IP, reserved IP and internal load balancer
* Deployment
* Manage deployment, such as CRUD, move, upgrade and restore.
* Get/Create DNS settings of a deployment.
* VNet
* Manage virtual network config, connection and gateway.
* Manage static IP
* Azure Media Services
* Create, read and delete Media Services Accounts
* Generate new account keys for Media Services Accounts
* HDInsight
* Manage clusters, such as CRUD, add/set storage
* Manage jobs, such as CRUD, start/stop/wait/invoke
* Manage HTTP service access. such as grant/revoke
* Store
* View available Microsoft Azure Store Add-On plans.
* Purchase, view, upgrade and remove Microsoft Azure Store Add-On instances.
* Utility
* Test whether a name is available. Currently support cloud service name, storage account name and service bus namespace name.
* Get the list of geo locations supported by Azure.
* Get the list of OS supported by Azure.
* Direct you to Azure portal.
* Windows Azure Pack
* Web Site: CRUD web site, deployment, configure and get log, start/stop/restart/show web site
* Service Bus: CRD namespace
* VM: CRUD VM, get OS disk, size profile and VM template, start/stop/restart/suspend/resume VM
* VNET: CRUD VNET and subnet.
* Cloud Service: CRUD cloud service.
* ExpressRoute
* Manage dedicated circuit
* Manage BGP peering
* Scheduler
* Manage job collections
* Manage HTTP and storage queue jobs
* Resource Manager
* Manage resource groups and deployments
* Query and download gallery templates
* Manage individual resources
* Traffic Manager
* Manage profiles and endpoints
* Azure Automation
* Manage automation accounts
* Manage automation jobs, runbooks and schedules
For detailed descriptions and examples of the cmdlets, type
* ```help azure``` to get all the cmdlets.
* ```help node-dev``` to get all Node.js development related cmdlets.
* ```help php-dev``` to get all PHP development related cmdlets.
* ```help <cmdlet name>``` to get the details of a specific cmdlet.
## Supported Environments
* [Microsoft Azure](http://www.azure.microsoft.com)
* [Windows Azure Pack](http://www.microsoft.com/en-us/server-cloud/windows-azure-pack.aspx)
* [Microsoft Azure China](http://www.windowsazure.cn/)
## Installation
### Microsoft Web Platform Installer
1. Install [Microsoft Web Platform Installer](http://www.microsoft.com/web/downloads/platform.aspx).
2. Open Microsoft Web Platform Installer and search for __Microsoft Azure PowerShell__.
3. Install.
You can also find the standalone installers for all the versions at [Downloads](https://github.com/Azure/azure-powershell/releases)
### Source Code
1. Download the source code from GitHub repo
2. Follow the [Microsoft Azure PowerShell Developer Guide](https://github.com/Azure/azure-powershell/wiki/Microsoft-Azure-PowerShell-Developer-Guide)
### Supported PowerShell Versions
* 0.6.9 or lower
* [Windows PowerShell 2.0](http://technet.microsoft.com/en-us/scriptcenter/dd742419)
* [Windows PowerShell 3.0](http://www.microsoft.com/en-us/download/details.aspx?id=34595)
* 0.6.10 or higher
* [Windows PowerShell 3.0](http://www.microsoft.com/en-us/download/details.aspx?id=34595)
## Get Started
In general, following are the steps to start using Microsoft Azure PowerShell
* Get yourself authenticated with Microsoft Azure. For details, please check out [this article](http://azure.microsoft.com/en-us/documentation/articles/install-configure-powershell/).
* Option 1: Login with your Microsoft account or Organizational account directly from PowerShell. Microsoft Azure Active Directory authentication is used in this case. No management certificate is needed.
* Starting from 0.8.6, you can use ``Add-AzureAccount -Credential`` to avoid the browser pop up for Organizational account.
* Option 2: Download and import a publish settings file which contains a management certificate.
* Use the cmdlets
The first step can differ depending on the environment you are targeting. Following are detailed instructions for each supported environment.
### Microsoft Azure
If you use both mechanisms on the same subscription, Microsoft Azure Active Directory authentication always wins. If you want to go back to management certificate authentication, please use ``Remove-AzureAccount``, which will remove the Microsoft Azure Active Directory information and bring management certificate authentication back in.
#### Login directly from PowerShell (Microsoft Azure Active Directory authentication)
```powershell
# Pop up an embedded browser control for you to login
Add-AzureAccount
# use the cmdlets to manage your services/applications
New-AzureWebsite -Name mywebsite -Location "West US"
```
#### Use publish settings file (Management certificate authentication)
```powershell
# Download a file which contains the publish settings information of your subscription.
# This will open a browser window and ask you to log in to get the file.
Get-AzurePublishSettingsFile
# Import the file you just downloaded.
# Notice that the file contains credential of your subscription so you don't want to make it public
# (like check in to source control, etc.).
Import-AzurePublishSettingsFile "<file location>"
# Use the cmdlets to manage your services/applications
New-AzureWebsite -Name mywebsite -Location "West US"
```
### Microsoft Azure China
```powershell
# Check the environment supported by your Microsoft Azure PowerShell installation.
Get-AzureEnvironment
# Download a file which contains the publish settings information of your subscription.
# Use the -Environment parameter to target Microsoft Azure China.
# This will open a browser window and ask you to log in to get the file.
Get-AzurePublishSettingsFile -Environment "AzureChinaCloud"
# Import the file you just downloaded.
# Notice that the file contains credential of your subscription so you don't want to make it public
# (like check in to source control, etc.).
Import-AzurePublishSettingsFile "<file location>"
# Use the cmdlets to manage your services/applications
New-AzureWebsite -Name mywebsite -Location "China East"
```
### Windows Azure Pack
```powershell
# Add your Windows Azure Pack environment to your Microsoft Azure PowerShell installation.
# You will need to know the following information of your Windows Azure Pack environment.
# 1. URL to download the publish settings file Mandatory
# 2. Management service endpoint Optional
# 3. Management Portal URL Optional
# 4. Storage service endpoint Optional
Add-WAPackEnvironment -Name "MyWAPackEnv" `
-PublishSettingsFileUrl "<URL to download the publish settings file>" `
-ServiceEndpoint "<Management service endpoint>" `
-ManagementPortalUrl "<Management Portal URL>" `
-StorageEndpoint "<Storage service endpoint>"
# Download a file which contains the publish settings information of your subscription.
# Use the -Environment parameter to target your Windows Azure Pack environment.
# This will open a browser window and ask you to log in to get the file.
Get-WAPackPublishSettingsFile -Environment "MyWAPackEnv"
# Import the file you just downloaded.
# Notice that the file contains credential of your subscription so you don't want to make it public
# (like check in to source control, etc.).
Import-WAPackPublishSettingsFile "<file location>"
# Use the cmdlets to manage your services/applications
New-WAPackWebsite -Name mywebsite
```
## 2 Modes
Starting from 0.8.0, we are adding a separate mode for Resource Manager. You can use the following cmdlet to switch between the 2 modes:
* Service management: cmdlets using the Azure service management API
* Resource manager: cmdlets using the Azure Resource Manager API
They are not designed to work together.
```powershell
Switch-AzureMode AzureServiceManagement
Switch-AzureMode AzureResourceManager
```
## Find Your Way
All the cmdlets can be put into 3 categories:
1. Cmdlets support both Microsoft Azure and Windows Azure Pack
2. Cmdlets that only support Microsoft Azure
3. Cmdlets only support Windows Azure Pack
* For category 1, we are using an "Azure" prefix in the cmdlet name and adding an alias with "WAPack" prefix.
* For category 2, we are using an "Azure" prefix in the cmdlet name.
* For category 3, we are using a "WAPack" prefix in the cmdlet name.
So you can use the following cmdlet to find out all the cmdlets for your environment
```powershell
# Return all the cmdlets for Microsoft Azure
Get-Command *Azure*
# Return all the cmdlets for Windows Azure Pack
Get-Command *WAPack*
```
If you want to migrate some scripts from Microsoft Azure to Windows Azure Pack or vice versa, as long as the cmdlets you are using are in category 1, you should be able to migrate smoothly.
## Need Help?
Be sure to check out the [Microsoft Azure Developer Forums on Stack Overflow](http://go.microsoft.com/fwlink/?LinkId=234489) if you have trouble with the provided code.
## Contribute Code or Provide Feedback
If you would like to become an active contributor to this project please follow the instructions provided in [Microsoft Azure Projects Contribution Guidelines](http://windowsazure.github.com/guidelines.html).
If you encounter any bugs with the library please file an issue in the [Issues](https://github.com/Azure/azure-powershell/issues) section of the project.
# Learn More
* [Microsoft Azure Script Center](http://www.azure.microsoft.com/en-us/documentation/scripts/)
| {
"content_hash": "79645a5623c52a8ca618898696ad3c67",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 338,
"avg_line_length": 42.41610738255034,
"alnum_prop": 0.7670094936708861,
"repo_name": "mayurid/azure-powershell",
"id": "1a5369247b63f4679fe91d2121c0f333083f1c20",
"size": "12640",
"binary": false,
"copies": "10",
"ref": "refs/heads/dev",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "15822"
},
{
"name": "C#",
"bytes": "22945450"
},
{
"name": "HTML",
"bytes": "209"
},
{
"name": "JavaScript",
"bytes": "4979"
},
{
"name": "PHP",
"bytes": "41"
},
{
"name": "PowerShell",
"bytes": "1926265"
},
{
"name": "Shell",
"bytes": "50"
}
],
"symlink_target": ""
} |
package com.technawabs.pocketbank.ui.fragments;
import android.app.ProgressDialog;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.technawabs.pocketbank.R;
import com.technawabs.pocketbank.models.ConnectionDto;
import com.technawabs.pocketbank.ui.adapter.ContactUserAdapter;
import com.twitter.sdk.android.core.identity.TwitterLoginButton;
import java.util.ArrayList;
import java.util.List;
/**
 * Fragment intended to list a user's Twitter connections in a
 * {@link RecyclerView}, with a {@link TwitterLoginButton} for authentication.
 *
 * NOTE(review): neither the adapter nor the progress dialog is ever
 * initialised (their construction is commented out below), so the
 * RecyclerView currently receives a null adapter and renders nothing —
 * confirm whether this fragment is still work in progress.
 */
public class TwitterContactFragment extends Fragment {
    // TODO: Rename parameter arguments, choose names that match
    // the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
    private static final String ARG_PARAM1 = "param1";
    private static final String ARG_PARAM2 = "param2";

    // TODO: Rename and change types of parameters
    private String mParam1;
    private String mParam2;

    // Log tag derived from the concrete class name.
    private final String TAG=this.getClass().getSimpleName();
    private ProgressDialog progressDialog;
    // Backing data for the contact list; never populated in this class.
    private List<ConnectionDto> connectionDtos;
    private RecyclerView recyclerView;
    private LinearLayoutManager linearLayoutManager;
    private ContactUserAdapter contactUserAdapter;
    private TwitterLoginButton twitterLoginButton;

    public TwitterContactFragment() {
        // Required empty public constructor
    }

    /**
     * Use this factory method to create a new instance of
     * this fragment using the provided parameters.
     *
     * @param param1 Parameter 1.
     * @param param2 Parameter 2.
     * @return A new instance of fragment TwitterContactFragment.
     */
    // TODO: Rename and change types and number of parameters
    public static TwitterContactFragment newInstance(String param1, String param2) {
        TwitterContactFragment fragment = new TwitterContactFragment();
        Bundle args = new Bundle();
        args.putString(ARG_PARAM1, param1);
        args.putString(ARG_PARAM2, param2);
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            // Restore the factory-method parameters from the argument bundle.
            mParam1 = getArguments().getString(ARG_PARAM1);
            mParam2 = getArguments().getString(ARG_PARAM2);
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        View view=inflater.inflate(R.layout.fragment_twiiter_contact, container, false);
        recyclerView = (RecyclerView) view.findViewById(R.id.contact_list);
        twitterLoginButton=(TwitterLoginButton)view.findViewById(R.id.twitter_login);
        // progressDialog=ProgressDialog.show(getContext(),"","Loading...",true);
        // Vertical, smooth-scrolling linear list.
        linearLayoutManager = new LinearLayoutManager(getContext());
        linearLayoutManager.setOrientation(LinearLayoutManager.VERTICAL);
        linearLayoutManager.setSmoothScrollbarEnabled(true);
        recyclerView.setLayoutManager(linearLayoutManager);
        connectionDtos=new ArrayList<>();
        // contactUserAdapter=new ContactUserAdapter(connectionDtos,getContext());
        // NOTE(review): contactUserAdapter is still null at this point — the
        // RecyclerView gets a null adapter until the line above is restored.
        recyclerView.setAdapter(contactUserAdapter);
        return view;
    }
}
| {
"content_hash": "0cb124e2623be0697510c97048a21858",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 88,
"avg_line_length": 39.40229885057471,
"alnum_prop": 0.7284130688448075,
"repo_name": "AadityaDev/pocketbank",
"id": "4f9e1bdafd145dbb5df564cd26d3bcbc8fb4ee8a",
"size": "3428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/technawabs/pocketbank/ui/fragments/TwitterContactFragment.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "106610"
}
],
"symlink_target": ""
} |
<?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;
use yii\helpers\ArrayHelper;
use kartik\select2\Select2;
/* @var $this yii\web\View */
/* @var $model app\models\Empleado */
/* @var $form yii\widgets\ActiveForm */
?>
<?php // Form partial: renders the model's input fields plus a context-aware submit button. ?>
<div class="empleado-form">

    <?php $form = ActiveForm::begin(); ?>

    <?= $form->field($model, 'nombre')->textInput() ?>

    <?= $form->field($model, 'direccion')->textInput() ?>

    <?= $form->field($model, 'contacto')->textInput() ?>

    <?= $form->field($model, 'telefono')->textInput() ?>

    <?= $form->field($model, 'correo')->textInput() ?>

    <?php // Fix: the button wrapper was missing its opening tag, leaving the
          // container with one opening <div> and two closing </div>. ?>
    <div class="form-group">
        <?= Html::submitButton($model->isNewRecord ? 'Guardar' : 'Guardar cambios', ['class' => $model->isNewRecord ? 'btn btn-success' : 'btn btn-primary']) ?>
    </div>

    <?php ActiveForm::end(); ?>

</div>
| {
"content_hash": "eae12c72c212f0a39e8bc9b915f06a96",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 160,
"avg_line_length": 23.558823529411764,
"alnum_prop": 0.5930087390761548,
"repo_name": "rzamarripa/shabel",
"id": "2baa8926b064eb2a287b05a5e1cdd226cbe8c53f",
"size": "801",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "views/proveedor/_form.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "199"
},
{
"name": "Batchfile",
"bytes": "1030"
},
{
"name": "CSS",
"bytes": "1533204"
},
{
"name": "HTML",
"bytes": "179629"
},
{
"name": "JavaScript",
"bytes": "3469599"
},
{
"name": "PHP",
"bytes": "295872"
}
],
"symlink_target": ""
} |
// This code is auto-generated, do not modify
package com.spectralogic.ds3client.commands.spectrads3;
import com.spectralogic.ds3client.networking.HttpVerb;
import com.spectralogic.ds3client.commands.interfaces.AbstractRequest;
import com.spectralogic.ds3client.models.S3InitialDataPlacementPolicy;
import com.spectralogic.ds3client.models.DataReplicationRuleType;
import com.google.common.net.UrlEscapers;
/**
 * Auto-generated Spectra S3 request: modifies an existing S3 data replication
 * rule via {@code PUT /_rest_/s3_data_replication_rule/<rule>}.
 *
 * Optional attributes are supplied through the fluent {@code with*} methods,
 * each of which also registers the value as an HTTP query parameter.
 */
public class ModifyS3DataReplicationRuleSpectraS3Request extends AbstractRequest {

    // Variables
    /** Identifier of the rule to modify; embedded in the request path. */
    private final String s3DataReplicationRule;

    private S3InitialDataPlacementPolicy initialDataPlacement;

    private long maxBlobPartSizeInBytes;

    private boolean replicateDeletes;

    private DataReplicationRuleType type;

    // Constructor
    /**
     * @param s3DataReplicationRule identifier of the S3 data replication rule
     *                              that this request will modify
     */
    public ModifyS3DataReplicationRuleSpectraS3Request(final String s3DataReplicationRule) {
        this.s3DataReplicationRule = s3DataReplicationRule;
    }

    /** Sets the initial data placement policy and records it as a query parameter. */
    public ModifyS3DataReplicationRuleSpectraS3Request withInitialDataPlacement(final S3InitialDataPlacementPolicy initialDataPlacement) {
        this.initialDataPlacement = initialDataPlacement;
        this.updateQueryParam("initial_data_placement", initialDataPlacement);
        return this;
    }

    /** Sets the maximum blob part size (bytes) and records it as a query parameter. */
    public ModifyS3DataReplicationRuleSpectraS3Request withMaxBlobPartSizeInBytes(final long maxBlobPartSizeInBytes) {
        this.maxBlobPartSizeInBytes = maxBlobPartSizeInBytes;
        this.updateQueryParam("max_blob_part_size_in_bytes", maxBlobPartSizeInBytes);
        return this;
    }

    /** Sets whether deletes are replicated and records it as a query parameter. */
    public ModifyS3DataReplicationRuleSpectraS3Request withReplicateDeletes(final boolean replicateDeletes) {
        this.replicateDeletes = replicateDeletes;
        this.updateQueryParam("replicate_deletes", replicateDeletes);
        return this;
    }

    /** Sets the replication rule type and records it as a query parameter. */
    public ModifyS3DataReplicationRuleSpectraS3Request withType(final DataReplicationRuleType type) {
        this.type = type;
        this.updateQueryParam("type", type);
        return this;
    }

    /** This request always issues an HTTP PUT. */
    @Override
    public HttpVerb getVerb() {
        return HttpVerb.PUT;
    }

    /** REST path addressing the rule being modified. */
    @Override
    public String getPath() {
        return "/_rest_/s3_data_replication_rule/" + s3DataReplicationRule;
    }

    public String getS3DataReplicationRule() {
        return this.s3DataReplicationRule;
    }

    public S3InitialDataPlacementPolicy getInitialDataPlacement() {
        return this.initialDataPlacement;
    }

    public long getMaxBlobPartSizeInBytes() {
        return this.maxBlobPartSizeInBytes;
    }

    public boolean getReplicateDeletes() {
        return this.replicateDeletes;
    }

    public DataReplicationRuleType getType() {
        return this.type;
    }
}
"content_hash": "a34c46f1def8ce442ff015ce9b4ab533",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 138,
"avg_line_length": 27.95876288659794,
"alnum_prop": 0.745575221238938,
"repo_name": "DenverM80/ds3_java_sdk",
"id": "d965786c4f1b78898894e28a2f8899d626f50fb2",
"size": "3469",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ds3-sdk/src/main/java/com/spectralogic/ds3client/commands/spectrads3/ModifyS3DataReplicationRuleSpectraS3Request.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "4841457"
},
{
"name": "Kotlin",
"bytes": "9285"
},
{
"name": "Ruby",
"bytes": "3144"
},
{
"name": "Shell",
"bytes": "474"
}
],
"symlink_target": ""
} |
lychee.define('lychee.net.service.Stash').includes([
	'lychee.net.Service'
]).exports((lychee, global, attachments) => {

	const _Service = lychee.import('lychee.net.Service');



	/*
	 * IMPLEMENTATION
	 */

	const Composite = function(data) {

		let settings = Object.assign({}, data);


		_Service.call(this, settings);

		settings = null;


		/*
		 * INITIALIZATION
		 */

		// Remote side: re-broadcast every incoming sync payload.
		this.bind('sync', function(payload) {

			let tunnel = this.tunnel;
			if (tunnel === null) return;

			if (tunnel.type === 'remote') {

				this.broadcast(payload, {
					event: 'sync'
				});

			}

		}, this);

	};


	Composite.prototype = {

		/*
		 * ENTITY API
		 */

		// deserialize: function(blob) {},

		serialize: function() {

			let blob = _Service.prototype.serialize.call(this);
			blob['constructor'] = 'lychee.net.service.Stash';


			return blob;

		},



		/*
		 * CUSTOM API
		 */

		// Serializes the given asset map and pushes it over the tunnel.
		// Returns false when no object was supplied.
		sync: function(assets) {

			if ((assets instanceof Object) === false) {
				return false;
			}


			let payload = {};

			for (let id in assets) {
				payload[id] = lychee.serialize(assets[id]);
			}


			return this.send({
				timestamp: Date.now(),
				assets:    payload
			}, {
				event: 'sync'
			});

		}

	};


	return Composite;

});
| {
"content_hash": "cb91443fdadb49e780cffc444be78448",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 54,
"avg_line_length": 12.048543689320388,
"alnum_prop": 0.5600322320709106,
"repo_name": "Artificial-Engineering/lycheeJS",
"id": "66a475a62e9526755eb56743d2128757c9b2a7f3",
"size": "1241",
"binary": false,
"copies": "2",
"ref": "refs/heads/development",
"path": "lycheejs/libraries/lychee/source/net/service/Stash.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AppleScript",
"bytes": "258"
},
{
"name": "CSS",
"bytes": "21259"
},
{
"name": "HTML",
"bytes": "56420"
},
{
"name": "JavaScript",
"bytes": "1206074"
},
{
"name": "Shell",
"bytes": "38111"
},
{
"name": "Smarty",
"bytes": "5714"
}
],
"symlink_target": ""
} |

# SparkleFormation CLI
SparkleFormation command line interface for interacting
with orchestration APIs.
## API Compatibility
* AWS
* Azure
* Google
* Heat
* OpenStack
* Rackspace
* Terraform
## Documentation
* [User Documentation](http://www.sparkleformation.io/docs/sfn/)
* [sfn API Documentation](http://www.sparkleformation.io/docs/sfn/)
# Info
* Repository: https://github.com/sparkleformation/sfn
* Website: http://www.sparkleformation.io/docs/sfn/
* Mailing List: https://groups.google.com/forum/#!forum/sparkleformation
* IRC: [#sparkleformation @ Freenode](https://webchat.freenode.net/?channels=#sparkleformation)
* Gitter: [](https://gitter.im/SparkleFormation/sparkleformation?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[miasma]: http://miasma-rb.github.io/miasma/
| {
"content_hash": "83c78c2175d53feb5650f071d7024759",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 265,
"avg_line_length": 30.65625,
"alnum_prop": 0.7726809378185525,
"repo_name": "sparkleformation/sfn",
"id": "4588172d5ed0842a022f7b7516ff8b807c732cb3",
"size": "981",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ruby",
"bytes": "354215"
}
],
"symlink_target": ""
} |
/* Public interface to the free space map (FSM); see freespace.c. */
#ifndef FREESPACE_H_
#define FREESPACE_H_

#include "storage/block.h"
#include "storage/relfilenode.h"
#include "utils/relcache.h"

/* prototypes for public functions in freespace.c */

/* Amount of free space recorded in the FSM for the given heap block. */
extern Size GetRecordedFreeSpace(Relation rel, BlockNumber heapBlk);

/* Search the FSM for a page with at least spaceNeeded bytes free. */
extern BlockNumber GetPageWithFreeSpace(Relation rel, Size spaceNeeded);

/* Record oldPage's remaining free space, then search for a suitable page. */
extern BlockNumber RecordAndGetPageWithFreeSpace(Relation rel,
							  BlockNumber oldPage,
							  Size oldSpaceAvail,
							  Size spaceNeeded);

/* Record the free space available on a heap page. */
extern void RecordPageWithFreeSpace(Relation rel, BlockNumber heapBlk,
						Size spaceAvail);

/* Like RecordPageWithFreeSpace but addressed by relfilenode; the XLog
 * prefix suggests use during WAL replay — confirm in freespace.c. */
extern void XLogRecordPageWithFreeSpace(RelFileNode rnode, BlockNumber heapBlk,
							Size spaceAvail);

/* Truncate the FSM when the relation is shrunk to nblocks. */
extern void FreeSpaceMapTruncateRel(Relation rel, BlockNumber nblocks);

/* Vacuum/repair the relation's FSM. */
extern void FreeSpaceMapVacuum(Relation rel);

#endif   /* FREESPACE_H_ */
| {
"content_hash": "6627a8a643f1e1f6637fd0d547eae9c2",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 79,
"avg_line_length": 34.416666666666664,
"alnum_prop": 0.7808716707021792,
"repo_name": "ArcherCraftStore/ArcherVMPeridot",
"id": "e5862abd2eab101d9c9ee37a8d04a360a4959f86",
"size": "1264",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "pgsql/include/server/storage/freespace.h",
"mode": "33261",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import { FieldSchema, StructureSchema } from '@ephox/boulder';
import { Height, SugarElement, Width } from '@ephox/sugar';
import * as Fields from '../../data/Fields';
// Configuration schema for the Sliding behaviour: declares the required CSS
// classes, optional callbacks, and the dimension (width or height) that the
// sliding animation operates on.
export default [
  FieldSchema.required('closedClass'),
  FieldSchema.required('openClass'),
  // Classes applied while the element is animating shut / open.
  FieldSchema.required('shrinkingClass'),
  FieldSchema.required('growingClass'),
  // Optional: element on which the shrinking and growing animations run.
  FieldSchema.option('getAnimationRoot'),
  // Lifecycle callbacks fired at the start and end of each transition.
  Fields.onHandler('onShrunk'),
  Fields.onHandler('onStartShrink'),
  Fields.onHandler('onGrown'),
  Fields.onHandler('onStartGrow'),
  FieldSchema.defaulted('expanded', false),
  // 'dimension' selects which CSS property is animated; getDimension reads
  // the element's current size for that axis as a px string.
  FieldSchema.requiredOf('dimension', StructureSchema.choose(
    'property', {
      width: [
        Fields.output('property', 'width'),
        Fields.output('getDimension', (elem: SugarElement<HTMLElement>) => Width.get(elem) + 'px')
      ],
      height: [
        Fields.output('property', 'height'),
        Fields.output('getDimension', (elem: SugarElement<HTMLElement>) => Height.get(elem) + 'px')
      ]
    }
  ))
];
| {
"content_hash": "1c253a8c3d12adebba928f19b320a6da",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 99,
"avg_line_length": 31.848484848484848,
"alnum_prop": 0.6774500475737393,
"repo_name": "tinymce/tinymce",
"id": "2a4a6ae7f5c45a7a24566a27c00475aef98d2cec",
"size": "1051",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "modules/alloy/src/main/ts/ephox/alloy/behaviour/sliding/SlidingSchema.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9733"
},
{
"name": "HTML",
"bytes": "183264"
},
{
"name": "JavaScript",
"bytes": "117530"
},
{
"name": "Less",
"bytes": "182379"
},
{
"name": "TypeScript",
"bytes": "11764279"
}
],
"symlink_target": ""
} |
benchget
==============
Simple GET benchmark command-line tool, similar to ApacheBench
Usage: index.js,nab,benchget [options]
Options:
-h, --help output usage information
-V, --version output the version number
-n, --num_requests [num] Number of total requests (n)
-c, --concurrent [num] Number of concurrent requests
-o, --out [filepath] Write responses to [output] as json
Url to hit is the last parameter
Example: nab -n 10 -c 5 -o output.json "http://google.com/"
Example: benchget -n 10 -c 5 -o output.json "http://google.com/"
Example: node index.js -n 10 -c 5 -o output.json "http://google.com/"
| {
"content_hash": "3579bd978e1d5f445e3c580ae76d8e47",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 69,
"avg_line_length": 29,
"alnum_prop": 0.6356821589205397,
"repo_name": "miketheprogrammer/node-bench-get",
"id": "cd851c75c17b336b25efce81bf5409722bd2de0e",
"size": "667",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "2391"
}
],
"symlink_target": ""
} |
require 'spec_helper'
# Spec for the SpaceBeforeScript linter: a space must separate the
# script marker (`-`, `=`, or inline `%tag=`) from the code that follows.
describe HamlLint::Linter::SpaceBeforeScript do
  include_context 'linter'

  context 'when silent script has no separating space' do
    let(:haml) { <<-HAML }
      %span Hello
      -some_code
      %span World
    HAML

    it { should report_lint line: 2 }
  end

  context 'when silent script has a separating space' do
    let(:haml) { <<-HAML }
      %span Hello
      - some_code
      %span World
    HAML

    it { should_not report_lint }
  end

  context 'when script has no separating space' do
    let(:haml) { <<-HAML }
      %span Hello
      =some_code
      %span World
    HAML

    it { should report_lint line: 2 }
  end

  context 'when script has a separating space' do
    let(:haml) { <<-HAML }
      %span Hello
      = some_code
      %span World
    HAML

    it { should_not report_lint }
  end

  context 'when inline script has no separating space' do
    let(:haml) { <<-HAML }
      %span Hello
      %span='there'
      %span World
    HAML

    it { should report_lint line: 2 }
  end

  context 'when inline script has a separating space' do
    let(:haml) { <<-HAML }
      %span Hello
      %span= 'there'
      %span World
    HAML

    it { should_not report_lint }
  end

  # Guards against false positives when the scripted text repeats the tag name.
  context 'when inline script contains string that is the same as the tag' do
    let(:haml) { <<-HAML }
      %tag.count= count
    HAML

    it { should_not report_lint }
  end

  # Scripts continued onto following lines with a trailing comma.
  context 'when inline script spreads across multiple lines via comma' do
    context 'and the script has a separating space' do
      let(:haml) { <<-HAML }
        %tag= link_to 'Link',
          path
      HAML

      it { should_not report_lint }
    end

    context 'and the script does not have a separating space' do
      let(:haml) { <<-HAML }
        %tag=link_to 'Link',
          path
        %tag
      HAML

      it { should report_lint line: 1 }
    end
  end

  # Scripts continued onto following lines with HAML's `|` line continuation.
  context 'when inline script spreads across multiple lines via vertical pipe' do
    context 'and the script has a separating space' do
      let(:haml) { <<-HAML }
        %tag= link_to 'Click' + |
          'Here' |
      HAML

      it { should_not report_lint }
    end

    context 'and the script does not have a separating space' do
      let(:haml) { <<-HAML }
        %tag=link_to 'Click' + |
          'Here' |
        %tag
      HAML

      it { should report_lint line: 1 }
    end
  end

  # Interpolation inside plain text is not a script and must not be flagged.
  context 'when plain text contains interpolation' do
    let(:haml) { <<-HAML }
      %p
        Some \#{interpolated} text
    HAML

    it { should_not report_lint }
  end

  context 'when inline tag text contains interpolation' do
    let(:haml) { '%p Some #{interpolated} text' }

    it { should_not report_lint }
  end

  context 'when inline tag script contains quotes' do
    context 'and there is no separating space' do
      let(:haml) { '%p="Some #{interpolated} text"' }

      it { should report_lint }
    end

    context 'and there is a separating space' do
      let(:haml) { '%p= "Some #{interpolated} text"' }

      it { should_not report_lint }
    end
  end

  context 'when inline tag is nested in another tag' do
    let(:haml) { <<-HAML }
      %div
        %div
          %div= value
          %div= array[value]
    HAML

    it { should_not report_lint }

    context 'and the tag has siblings' do
      let(:haml) { <<-HAML }
        %div
          %div Hello
          %div= value
          = array[value]
      HAML

      it { should_not report_lint }
    end
  end
end
| {
"content_hash": "8c35c79ee7effbecd7465e83b57abf48",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 81,
"avg_line_length": 21.615853658536587,
"alnum_prop": 0.5678420310296192,
"repo_name": "mzp/haml-lint",
"id": "6addbf2884ddf3c01fb563c46794d34fb4c4f58e",
"size": "3545",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spec/haml_lint/linter/space_before_script_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "197586"
}
],
"symlink_target": ""
} |
<!-- Help topic page: how to get Intellisense for Rivet migrations in the PowerShell ISE. -->
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
    <title>PowerShell - about_Rivet_Intellisense - Rivet</title>
    <link href="silk.css" type="text/css" rel="stylesheet" />
    <link href="styles.css" type="text/css" rel="stylesheet" />
</head>
<body>

    <!-- Site-wide navigation bar (disabled entries kept for reference). -->
    <ul id="SiteNav">
        <li><a href="index.html">Get-Rivet</a></li>
        <!--<li><a href="about_Carbon_Installation.html">-Install</a></li>-->
        <li><a href="documentation.html">-Documentation</a></li>
        <!--<li><a href="about_Carbon_Support.html">-Support</a></li>-->
        <li><a href="releasenotes.html">-ReleaseNotes</a></li>
        <li><a href="http://pshdo.com">-Blog</a></li>
    </ul>

    <h1>about_Rivet_Intellisense</h1>

    <p>Explains how to get Intellisense when writing Rivet migrations.</p>

    <h2>Description</h2>

    <p>In order to get Intellisense when writing migrations, you'll need to have PowerShell
<strong><em>3</em></strong> installed, and use the PowerShell Integrated Scripting Environment (i.e.
ISE). You should be able to open a migration in the ISE by right-clicking it and
choosing "Edit". </p>

    <p>Once you've got a migration open in the ISE, you'll want to import the Rivet module.<br />
Use the <code>Import-Rivet.ps1</code> script:</p>

<pre><code>PS> Import-Rivet.ps1
</code></pre>

    <p>Make sure you use the path to Rivet in your environment.</p>

    <p>Once you've imported Rivet, you can get a list of available migrations by running
this command:</p>

<pre><code>PS> Get-Command -Module Rivet -CommandType Function
</code></pre>

    <!-- Page footer. -->
    <div class="Footer">
        Copyright 2013 - 2016 <a href="http://pshdo.com">Aaron Jensen</a>.
    </div>

</body>
</html>
| {
"content_hash": "aa4a20d14f73c152024e9f99605382c9",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 102,
"avg_line_length": 35.2,
"alnum_prop": 0.6539772727272727,
"repo_name": "RivetDB/Rivet",
"id": "1936c8841eaaca8d0fe9e867edad50394ebb5b07",
"size": "1760",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Website/about_Rivet_Intellisense.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1702"
},
{
"name": "C#",
"bytes": "239528"
},
{
"name": "CSS",
"bytes": "11203"
},
{
"name": "HTML",
"bytes": "1140659"
},
{
"name": "JavaScript",
"bytes": "1659"
},
{
"name": "PLSQL",
"bytes": "326"
},
{
"name": "PowerShell",
"bytes": "1366494"
},
{
"name": "XSLT",
"bytes": "1394"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- Spring application context: Apache Shiro security configuration. -->
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd"
    default-lazy-init="true">

    <description>Shiro安全配置</description>

    <!-- Shiro's main business-tier object for web-enabled applications -->
    <bean id="securityManager" class="org.apache.shiro.web.mgt.DefaultWebSecurityManager">
        <property name="realm" ref="shiroDbRealm" />
        <property name="cacheManager" ref="shiroEhcacheManager" />
    </bean>

    <!-- Project-specific Realm; every DAO that accountService depends on must be declared with depends-on -->
    <bean id="shiroDbRealm" class="com.agileEAP.security.service.ShiroDbRealm"
        depends-on="operatorRepository">
        <property name="accountService" ref="accountService" />
    </bean>

    <!-- Shiro Filter: maps URL patterns to filter chains (anon/authc/roles/user) -->
    <bean id="shiroFilter" class="org.apache.shiro.spring.web.ShiroFilterFactoryBean">
        <property name="securityManager" ref="securityManager" />
        <property name="loginUrl" value="/login" />
        <property name="successUrl" value="/" />
        <property name="filterChainDefinitions">
            <value>
                /login = authc
                /logout = logout
                /js/** = anon
                /validcode/** = anon
                /images/** = anon
                /themes/** = anon
                /register/** = anon
                /cxf/** = anon
                /admin/** = roles[admin]
                /** = user
            </value>
        </property>
    </bean>

    <!-- Cache for user authorization data, backed by EhCache -->
    <bean id="shiroEhcacheManager" class="org.apache.shiro.cache.ehcache.EhCacheManager">
        <property name="cacheManagerConfigFile" value="classpath:ehcache/ehcache-shiro.xml" />
    </bean>

    <!-- Ensures beans implementing Shiro's internal lifecycle callbacks are invoked -->
    <bean id="lifecycleBeanPostProcessor" class="org.apache.shiro.spring.LifecycleBeanPostProcessor" />

</beans>
"content_hash": "ab6def96ef246ade99850193f3d8d624",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 130,
"avg_line_length": 36.816326530612244,
"alnum_prop": 0.70509977827051,
"repo_name": "AgileEAP/aglieEAP",
"id": "3bc290fe179523bab77a9fb96d73aa5e1f26ff6b",
"size": "1886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "agileEAP-portal/src/main/resources/security/applicationContext-shiro.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "194138"
},
{
"name": "Java",
"bytes": "813529"
},
{
"name": "JavaScript",
"bytes": "785718"
},
{
"name": "Shell",
"bytes": "517"
}
],
"symlink_target": ""
} |
package org.basex.query.func.fn;
import static org.basex.util.Token.*;
import java.util.*;
import org.basex.query.*;
import org.basex.query.func.*;
import org.basex.query.value.item.*;
import org.basex.util.*;
/**
* Function implementation.
*
* @author BaseX Team 2005-16, BSD License
* @author Christian Gruen
*/
public final class FnSubstring extends StandardFunc {
  /**
   * Evaluates fn:substring: exprs[0] is the string, exprs[1] the 1-based
   * start position, and the optional exprs[2] the requested length.
   */
  @Override
  public Item item(final QueryContext qc, final InputInfo ii) throws QueryException {
    // normalize positions
    final byte[] str = toEmptyToken(exprs[0], qc);
    final Item is = toAtomItem(exprs[1], qc);
    int s;
    if(is instanceof Int) {
      // integer start: convert 1-based XQuery position to 0-based offset
      s = (int) is.itr(info) - 1;
    } else {
      final double ds = is.dbl(info);
      // NaN start position yields the empty string
      if(Double.isNaN(ds)) return Str.ZERO;
      s = subPos(ds);
    }

    final boolean end = exprs.length == 3, ascii = ascii(str);
    // l: total character count (byte count suffices on the ASCII fast path)
    int l = ascii ? str.length : length(str);
    int e = l;
    if(end) {
      // third argument present: e temporarily holds the requested length
      e = ie_length(qc);
    }
    if(s < 0) {
      // negative start shortens the requested length accordingly
      e += s;
      s = 0;
    }
    // convert length to exclusive end offset, clamped to the string length
    e = Math.min(l, end ? s + e : Integer.MAX_VALUE);
    if(s >= e) return Str.ZERO;
    // ASCII fast path: character offsets coincide with byte offsets
    if(ascii) return Str.get(substring(str, s, e));

    // non-ASCII: walk the UTF-8 bytes, mapping character offsets s/e
    // to byte offsets ss/ee (cl() returns the byte length of a codepoint)
    int ss = s;
    int ee = e;
    int p = 0;
    for(l = 0; l < str.length; l += cl(str, l), ++p) {
      if(p == s) ss = l;
      if(p == e) ee = l;
    }
    // e may equal the character count, i.e. point just past the last byte
    if(p == e) ee = l;
    return Str.get(Arrays.copyOfRange(str, ss, ee));
  }

  /** Reads the third argument (length) as an integer or rounded double. */
  private int ie_length(final QueryContext qc) throws QueryException {
    final Item ie = toAtomItem(exprs[2], qc);
    return ie instanceof Int ? (int) ie.itr(info) : subPos(ie.dbl(info) + 1);
  }

  /**
   * Returns the specified substring position.
   * Implements round(d) - 1, since floor(d - .5) == floor(d + .5) - 1.
   * @param d double value
   * @return substring position
   */
  private static int subPos(final double d) {
    final int i = (int) d;
    return d == i ? i - 1 : (int) StrictMath.floor(d - .5);
  }
}
| {
"content_hash": "6598e07cd05da5091edab321f64b1096",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 85,
"avg_line_length": 25.36231884057971,
"alnum_prop": 0.5811428571428572,
"repo_name": "drmacro/basex",
"id": "46d2bbc33db08a66896f14f6b13587f67d571f1a",
"size": "1750",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "basex-core/src/main/java/org/basex/query/func/fn/FnSubstring.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "9372"
},
{
"name": "Batchfile",
"bytes": "2502"
},
{
"name": "C",
"bytes": "17146"
},
{
"name": "C#",
"bytes": "15295"
},
{
"name": "C++",
"bytes": "7796"
},
{
"name": "CSS",
"bytes": "3386"
},
{
"name": "Common Lisp",
"bytes": "3211"
},
{
"name": "HTML",
"bytes": "1057"
},
{
"name": "Haskell",
"bytes": "4065"
},
{
"name": "Java",
"bytes": "23497382"
},
{
"name": "JavaScript",
"bytes": "8881"
},
{
"name": "Makefile",
"bytes": "1234"
},
{
"name": "PHP",
"bytes": "8690"
},
{
"name": "Perl",
"bytes": "7801"
},
{
"name": "Python",
"bytes": "26123"
},
{
"name": "QMake",
"bytes": "377"
},
{
"name": "Rebol",
"bytes": "4731"
},
{
"name": "Ruby",
"bytes": "7359"
},
{
"name": "Scala",
"bytes": "11692"
},
{
"name": "Shell",
"bytes": "3557"
},
{
"name": "Visual Basic",
"bytes": "11957"
},
{
"name": "XQuery",
"bytes": "310785"
},
{
"name": "XSLT",
"bytes": "172"
}
],
"symlink_target": ""
} |
Most code has been extracted from the [Rails 3-1-stable branch](https://github.com/rails/rails/tree/3-1-stable). Modified to suit [our needs](http://www.shopify.com).
## Usage
In your `Gemfile`:
gem "sprockets_rails3_backport"
...plus whatever supplementary gems you want for the asset pipeline:
gem 'coffee-script', '2.2.0'
gem 'therubyracer', '0.9.9'
gem 'uglifier', '>= 1.0.3'
In your `routes.rb`:
MyApp::Application.routes.draw do
if (app = Rails.application).config.assets.compile
mount app.assets => app.config.assets.prefix
end
# ...
end
Here are the various `config.assets` options and their defaults:
config.assets.paths = []
config.assets.precompile = [ Proc.new{ |path| !['.js', '.css'].include?(File.extname(path)) },
/(?:\/|\\|\A)application\.(css|js)$/ ]
config.assets.prefix = "/assets"
config.assets.version = ''
config.assets.debug = false
config.assets.compile = true
config.assets.digest = false
config.assets.manifest = nil
config.assets.cache_store = [ :file_store, "#{root}/tmp/cache/assets/" ]
config.assets.js_compressor = nil
config.assets.css_compressor = nil
config.assets.initialize_on_precompile = true
## Differences from Rails 3.1
This gem was made for [Shopify](http://www.shopify.com)'s use, and I've made changes to some behaviour that either didn't work in Rails 3.0, or didn't make sense for Shopify:
* `image_path` (and therefore `image_tag`) [do not use the asset pipeline](https://github.com/jamesmacaulay/sprockets_rails3_backport/blob/d4cd5e5/lib/sprockets/helpers/rails_helper.rb#L60-63).
* Rails 3.0 does not have support for initializer groups, so `rake assets:precompile` [cannot do the trick of partially loading the environment](https://github.com/jamesmacaulay/sprockets_rails3_backport/blob/d4cd5e5/lib/sprockets/assets.rake#L97-98).
* precompilation [does not output gzipped versions of assets](https://github.com/jamesmacaulay/sprockets_rails3_backport/blob/d4cd5e5/lib/sprockets/static_compiler.rb#L39).
* `javascript_path` and `stylesheet_path` helpers have been fixed to [append the appropriate file extension if there's none provided](https://github.com/jamesmacaulay/sprockets_rails3_backport/blob/d4cd5e5/lib/sprockets/helpers/rails_helper.rb#L65-73).
* computing digests in the view helpers [no longer throws an error](https://github.com/jamesmacaulay/sprockets_rails3_backport/blob/d4cd5e5/lib/sprockets/helpers/rails_helper.rb#L145-146) if the digest is not found and `config.assets.compile` is off.
* `config.assets.enabled` [is](https://github.com/jamesmacaulay/sprockets_rails3_backport/blob/d4cd5e5/lib/sprockets/railtie.rb#L18) [not](https://github.com/jamesmacaulay/sprockets_rails3_backport/blob/d4cd5e5/lib/extensions/application_ext.rb#L22) [used](https://github.com/jamesmacaulay/sprockets_rails3_backport/blob/d4cd5e5/lib/sprockets/assets.rake#L30-33).
| {
"content_hash": "7419d910bf2fb9ea4370c73b8c42ff0c",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 363,
"avg_line_length": 63.2,
"alnum_prop": 0.6857594936708861,
"repo_name": "jamesmacaulay/sprockets_rails3_backport",
"id": "14d5946ec84aa600b91a27d1ab1725e0748b1bb1",
"size": "3224",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.markdown",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "23291"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<jcr:root xmlns:cq="http://www.day.com/jcr/cq/1.0" xmlns:jcr="http://www.jcp.org/jcr/1.0"
jcr:primaryType="cq:ClientLibraryFolder"
jsProcessor="[default:none,min:gcc]"
allowProxy="{Boolean}true"
categories="[acs-commons.authoring.composite-multifield,acs-commons.granite.ui.coral2.foundation]"
dependencies="[lodash.underscore]"/>
<!--
Wrapper Client Library definition:
<?xml version="1.0" encoding="UTF-8"?>
<jcr:root xmlns:cq="http://www.day.com/jcr/cq/1.0" xmlns:jcr="http://www.jcp.org/jcr/1.0"
jcr:primaryType="cq:ClientLibraryFolder"
jsProcessor="[default:none,min:gcc]"
allowProxy="{Boolean}true"
categories="[cq.authoring.dialog]"
embed="[acs-commons.authoring.composite-multifield]"/>
-->
<!-- Depending on where this is being used, you may need to set categories="[granite.ui.coral.foundation]" instead -->
| {
"content_hash": "b7ec14dfde9d704de4ddcd7924e11179",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 118,
"avg_line_length": 47.65,
"alnum_prop": 0.6652675760755509,
"repo_name": "Adobe-Consulting-Services/acs-aem-commons",
"id": "6d4669aeb5f5920f605df30d2fbad504d6f0656f",
"size": "953",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ui.apps/src/main/content/jcr_root/apps/acs-commons/touchui-widgets/composite-multifield/.content.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "23257"
},
{
"name": "Groovy",
"bytes": "3122"
},
{
"name": "HTML",
"bytes": "84708"
},
{
"name": "Java",
"bytes": "7126646"
},
{
"name": "JavaScript",
"bytes": "436055"
},
{
"name": "Less",
"bytes": "41808"
},
{
"name": "Rich Text Format",
"bytes": "363"
},
{
"name": "Shell",
"bytes": "354"
}
],
"symlink_target": ""
} |
describe("http.tls module", function()
	local tls = require "http.tls"
	local cqueues = require "cqueues"
	local ca = require "cqueues.auxlib"
	local cs = require "cqueues.socket"
	local openssl_ctx = require "openssl.ssl.context"
	local openssl_pkey = require "openssl.pkey"
	local openssl_x509 = require "openssl.x509"
	it("banned ciphers list denies a negotiated banned cipher", function()
		-- Build a cipher list containing only ciphers from the banned set,
		-- so whatever cipher the peers negotiate is guaranteed to be banned.
		local banned_cipher_list do
			local t = {}
			for cipher in pairs(tls.banned_ciphers) do
				table.insert(t, cipher)
			end
			banned_cipher_list = table.concat(t, ":")
		end
		local s, c = ca.assert(cs.pair())
		local cq = cqueues.new()
		cq:wrap(function()
			-- Client side: perform the handshake and inspect the negotiated
			-- cipher on the client's own connection.
			local ctx = openssl_ctx.new("TLSv1", false)
			assert(c:starttls(ctx))
			-- BUGFIX: was `s:checktls()` — the client coroutine must check
			-- its own (client) socket, not the server's.
			local ssl = assert(c:checktls())
			local cipher = ssl:getCipherInfo()
			assert(tls.banned_ciphers[cipher.name])
		end)
		cq:wrap(function()
			-- Server side: restrict the cipher list to banned ciphers only
			-- and install a throwaway self-signed certificate.
			local ctx = openssl_ctx.new("TLSv1", true)
			ctx:setCipherList(banned_cipher_list)
			ctx:setEphemeralKey(openssl_pkey.new{ type = "EC", curve = "prime256v1" })
			local crt = openssl_x509.new()
			local key = openssl_pkey.new()
			crt:setPublicKey(key)
			crt:sign(key)
			assert(ctx:setPrivateKey(key))
			assert(ctx:setCertificate(crt))
			assert(s:starttls(ctx))
			local ssl = assert(s:checktls())
			local cipher = ssl:getCipherInfo()
			assert(tls.banned_ciphers[cipher.name])
		end)
		-- assert_loop and TEST_TIMEOUT are globals provided by the spec helper
		assert_loop(cq, TEST_TIMEOUT)
		assert.truthy(cq:empty())
		s:close()
		c:close()
	end)
	it("can create a new client context", function()
		tls.new_client_context()
	end)
	it("can create a new server context", function()
		tls.new_server_context()
	end)
end)
| {
"content_hash": "4616408c32aac8feba5f9d80c29eb81b",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 77,
"avg_line_length": 31.346153846153847,
"alnum_prop": 0.69079754601227,
"repo_name": "RyanSquared/lua-http",
"id": "3be66733bda7ee59a058395315bfa8d4e0a68c03",
"size": "1630",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spec/tls_spec.lua",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "467900"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.