name
stringlengths
12
178
code_snippet
stringlengths
8
36.5k
score
float64
3.26
3.68
hbase_ProcedureExecutor_isFinished
/** * Return true if the procedure is finished. The state may be "completed successfully" or "failed * and rolledback". Use getResult() to check the state or get the result data. * @param procId the ID of the procedure to check * @return true if the procedure execution is finished, otherwise false. */ public boolean isFinished(final long procId) { return !procedures.containsKey(procId); }
3.68
framework_AriaHelper_handleInputInvalid
/** * Handles the required actions depending of the input element contains * unaccepted input. * * @param element * Element, typically an input Widget like TextField * @param invalid * boolean, true when the element input has an error */ public static void handleInputInvalid(Element element, boolean invalid) { if (invalid) { Roles.getTextboxRole().setAriaInvalidState(element, InvalidValue.TRUE); } else { Roles.getTextboxRole().removeAriaInvalidState(element); } }
3.68
framework_Button_writeDesign
/* * (non-Javadoc) * * @see com.vaadin.ui.AbstractComponent#writeDesign(org.jsoup.nodes.Element * , com.vaadin.ui.declarative.DesignContext) */ @Override public void writeDesign(Element design, DesignContext designContext) { super.writeDesign(design, designContext); Attributes attr = design.attributes(); Button def = designContext.getDefaultInstance(this); String content = getCaption(); if (content != null) { design.html(content); } // plain-text (default is html) if (!isHtmlContentAllowed()) { design.attr(DESIGN_ATTR_PLAIN_TEXT, true); // encode HTML entities if (content != null) { design.html(DesignFormatter.encodeForTextNode(content)); } } // icon-alt DesignAttributeHandler.writeAttribute("icon-alt", attr, getIconAlternateText(), def.getIconAlternateText(), String.class, designContext); // click-shortcut if (clickShortcut != null) { DesignAttributeHandler.writeAttribute("click-shortcut", attr, clickShortcut, null, ShortcutAction.class, designContext); } }
3.68
flink_StringUtils_byteToHexString
/** * Given an array of bytes it will convert the bytes to a hex string representation of the * bytes. * * @param bytes the bytes to convert in a hex string * @return hex string representation of the byte array */ public static String byteToHexString(final byte[] bytes) { return byteToHexString(bytes, 0, bytes.length); }
3.68
flink_FunctionContext_getMetricGroup
/** * Returns the metric group for this parallel subtask. * * @return metric group for this parallel subtask. */ public MetricGroup getMetricGroup() { if (context == null) { LOG.warn( "Calls to FunctionContext.getMetricGroup will have no effect " + "at the current location."); return defaultMetricsGroup; } return context.getMetricGroup(); }
3.68
flink_SourcePredicates_areJavaClasses
/** * Tests that a given class is a Java class. * * <p>ArchUnit does not yet fully support Scala. Rules should ensure that they restrict * themselves to only Java classes for correct results. */ public static DescribedPredicate<JavaClass> areJavaClasses() { return new DescribedPredicate<JavaClass>("are Java classes") { @Override public boolean test(JavaClass clazz) { return isJavaClass(clazz); } }; }
3.68
druid_Resources_getDefaultClassLoader
/** * Returns the default classloader (may be null). * * @return The default classloader */ public static ClassLoader getDefaultClassLoader() { return defaultClassLoader; }
3.68
hbase_WALPrettyPrinter_setSequenceFilter
/** * sets the region by which output will be filtered when nonnegative, serves as a filter; only log * entries with this sequence id will be printed */ public void setSequenceFilter(long sequence) { this.sequence = sequence; }
3.68
flink_DataSet_maxBy
/** * Selects an element with maximum value. * * <p>The maximum is computed over the specified fields in lexicographical order. * * <p><strong>Example 1</strong>: Given a data set with elements <code>[0, 1], [1, 0]</code>, * the results will be: * * <ul> * <li><code>maxBy(0)</code>: <code>[1, 0]</code> * <li><code>maxBy(1)</code>: <code>[0, 1]</code> * </ul> * * <p><strong>Example 2</strong>: Given a data set with elements <code>[0, 0], [0, 1]</code>, * the results will be: * * <ul> * <li><code>maxBy(0, 1)</code>: <code>[0, 1]</code> * </ul> * * <p>If multiple values with maximum value at the specified fields exist, a random one will be * picked. * * <p>Internally, this operation is implemented as a {@link ReduceFunction}. * * @param fields Field positions to compute the maximum over * @return A {@link ReduceOperator} representing the maximum */ @SuppressWarnings({"unchecked", "rawtypes"}) public ReduceOperator<T> maxBy(int... fields) { if (!getType().isTupleType() || !(getType() instanceof TupleTypeInfo)) { throw new InvalidProgramException("DataSet#maxBy(int...) only works on Tuple types."); } return new ReduceOperator<>( this, new SelectByMaxFunction((TupleTypeInfo) getType(), fields), Utils.getCallLocationName()); }
3.68
framework_VUI_storeFocus
/** * Allows to store the currently focused Element. * * Current use case is to store the focus when a Window is opened. Does * currently handle only a single value. Needs to be extended for #12158 * * @param focusedElement */ public void storeFocus() { storedFocus = WidgetUtil.getFocusedElement(); }
3.68
framework_DateCell_updatePositionFor
// date methods are not deprecated in GWT @SuppressWarnings("deprecation") private void updatePositionFor(DateCellDayEvent dayEvent, Date targetDay, CalendarEvent calendarEvent) { if (shouldDisplay(calendarEvent)) { dayEvent.getElement().getStyle().clearDisplay(); Date fromDt = calendarEvent.getStartTime(); int h = fromDt.getHours(); int m = fromDt.getMinutes(); long range = calendarEvent.getRangeInMinutesForDay(targetDay); boolean onDifferentDays = calendarEvent.isTimeOnDifferentDays(); if (onDifferentDays) { if (calendarEvent.getStart().compareTo(targetDay) != 0) { // Current day slot is for the end date and all in-between // days. Lets fix also the start & end times. h = 0; m = 0; } } int startFromMinutes = (h * 60) + m; dayEvent.updatePosition(startFromMinutes, range); } else { dayEvent.getElement().getStyle().setDisplay(Display.NONE); } }
3.68
hudi_StreamerUtil_initTableIfNotExists
/** * Initialize the table if it does not exist. * * @param conf the configuration * @throws IOException if errors happens when writing metadata */ public static HoodieTableMetaClient initTableIfNotExists( Configuration conf, org.apache.hadoop.conf.Configuration hadoopConf) throws IOException { final String basePath = conf.getString(FlinkOptions.PATH); if (!tableExists(basePath, hadoopConf)) { HoodieTableMetaClient.withPropertyBuilder() .setTableCreateSchema(conf.getString(FlinkOptions.SOURCE_AVRO_SCHEMA)) .setTableType(conf.getString(FlinkOptions.TABLE_TYPE)) .setTableName(conf.getString(FlinkOptions.TABLE_NAME)) .setDatabaseName(conf.getString(FlinkOptions.DATABASE_NAME)) .setRecordKeyFields(conf.getString(FlinkOptions.RECORD_KEY_FIELD, null)) .setPayloadClassName(conf.getString(FlinkOptions.PAYLOAD_CLASS_NAME)) .setPreCombineField(OptionsResolver.getPreCombineField(conf)) .setArchiveLogFolder(ARCHIVELOG_FOLDER.defaultValue()) .setPartitionFields(conf.getString(FlinkOptions.PARTITION_PATH_FIELD, null)) .setKeyGeneratorClassProp( conf.getOptional(FlinkOptions.KEYGEN_CLASS_NAME).orElse(SimpleAvroKeyGenerator.class.getName())) .setHiveStylePartitioningEnable(conf.getBoolean(FlinkOptions.HIVE_STYLE_PARTITIONING)) .setUrlEncodePartitioning(conf.getBoolean(FlinkOptions.URL_ENCODE_PARTITIONING)) .setCDCEnabled(conf.getBoolean(FlinkOptions.CDC_ENABLED)) .setCDCSupplementalLoggingMode(conf.getString(FlinkOptions.SUPPLEMENTAL_LOGGING_MODE)) .setTimelineLayoutVersion(1) .initTable(hadoopConf, basePath); LOG.info("Table initialized under base path {}", basePath); } else { LOG.info("Table [{}/{}] already exists, no need to initialize the table", basePath, conf.getString(FlinkOptions.TABLE_NAME)); } return StreamerUtil.createMetaClient(conf, hadoopConf); // Do not close the filesystem in order to use the CACHE, // some filesystems release the handles in #close method. }
3.68
querydsl_QueryBase_groupBy
/** * Add grouping/aggregation expressions * * @param o group by expressions * @return the current object */ public Q groupBy(Expression<?>... o) { return queryMixin.groupBy(o); }
3.68
flink_ContextResolvedTable_isTemporary
/** @return true if the table is temporary. An anonymous table is always temporary. */ public boolean isTemporary() { return catalog == null; }
3.68
MagicPlugin_MagicController_registerPreLoad
// Kind of a misnomer now, the whole notion of having plugins register in a "preload" event is flawed, // since it requires those plugins to load before magic in order to register an event handler. // Anyway, this is now done after loading is really finished. protected void registerPreLoad(ConfigurationSection configuration) { PreLoadEvent loadEvent = new PreLoadEvent(this); Bukkit.getPluginManager().callEvent(loadEvent); blockBreakManagers.addAll(loadEvent.getBlockBreakManagers()); blockBuildManagers.addAll(loadEvent.getBlockBuildManagers()); pvpManagers.addAll(loadEvent.getPVPManagers()); teamProviders.addAll(loadEvent.getTeamProviders()); castManagers.addAll(loadEvent.getCastManagers()); targetingProviders.addAll(loadEvent.getTargetingManagers()); teamProviders.addAll(loadEvent.getTeamProviders()); playerWarpManagers.putAll(loadEvent.getWarpManagers()); // Vault currency must be registered after VaultController initialization ConfigurationSection currencyConfiguration = configuration.getConfigurationSection("builtin_currency"); addCurrency(new VaultCurrency(this, currencyConfiguration.getConfigurationSection("currency"))); // Custom currencies can override the defaults for (Currency currency : loadEvent.getCurrencies()) { addCurrency(currency); } if (aureliumSkillsManager != null) { aureliumSkillsManager.register(currencyConfiguration); } if (tokenManager != null) { tokenManager.register(currencyConfiguration); } // Configured currencies override everything else currencyConfiguration = configuration.getConfigurationSection("custom_currency"); Set<String> keys = currencyConfiguration.getKeys(false); for (String key : keys) { addCurrency(new CustomCurrency(this, key, currencyConfiguration.getConfigurationSection(key))); } log("Registered currencies: " + StringUtils.join(currencies.keySet(), ",")); // Register any attribute providers that were in the PreLoadEvent. 
for (AttributeProvider provider : loadEvent.getAttributeProviders()) { externalProviders.add(provider); } // Re-register any providers previously registered by external plugins via register() for (MagicProvider provider : externalProviders) { registerAndUpdate(provider); } // Don't allow overriding Magic requirements checkMagicRequirements(); }
3.68
hbase_RSGroupInfoManagerImpl_getRegions
/** Returns List of Regions associated with this <code>server</code>. */ private List<RegionInfo> getRegions(final Address server) { LinkedList<RegionInfo> regions = new LinkedList<>(); for (Map.Entry<RegionInfo, ServerName> el : masterServices.getAssignmentManager() .getRegionStates().getRegionAssignments().entrySet()) { if (el.getValue() == null) { continue; } if (el.getValue().getAddress().equals(server)) { addRegion(regions, el.getKey()); } } for (RegionStateNode state : masterServices.getAssignmentManager().getRegionsInTransition()) { if ( state.getRegionLocation() != null && state.getRegionLocation().getAddress().equals(server) ) { addRegion(regions, state.getRegionInfo()); } } return regions; }
3.68
flink_DataSet_min
/** * Syntactic sugar for {@link #aggregate(Aggregations, int)} using {@link Aggregations#MIN} as * the aggregation function. * * <p><strong>Note:</strong> This operation is not to be confused with {@link #minBy(int...)}, * which selects one element with the minimum value at the specified field positions. * * @param field The index of the Tuple field on which the aggregation function is applied. * @return An AggregateOperator that represents the min'ed DataSet. * @see #aggregate(Aggregations, int) * @see #minBy(int...) */ public AggregateOperator<T> min(int field) { return aggregate(Aggregations.MIN, field); }
3.68
framework_MeasuredSize_measure
/** * Measures paddings, margins, border, height, and weight of the given * element and stores the results within this {@link MeasuredSize} object. * * @param element * element to be measured * @param thoroughSizeCheck * {@code true} if the measuring should use the more reliable * size check that requires ensuring that the element is still * present in the DOM tree, {@code false} for the slightly faster * check that will give incorrect size information if this method * is called while the element or any of its parents are in the * middle of a transform animation. * @return data object for whether the width or height of the given element * has changed since previous measure */ public MeasureResult measure(Element element, boolean thoroughSizeCheck) { if (thoroughSizeCheck && !Document.get().getBody().isOrHasChild(element)) { return new MeasureResult(false, false); } Profiler.enter("MeasuredSize.measure"); boolean heightChanged = false; boolean widthChanged = false; Profiler.enter("new ComputedStyle"); ComputedStyle computedStyle = new ComputedStyle(element); int[] paddings = computedStyle.getPadding(); // Some browsers do not reflow until accessing data from the computed // style object Profiler.leave("new ComputedStyle"); Profiler.enter("Measure paddings"); if (!heightChanged && hasHeightChanged(this.paddings, paddings)) { debugSizeChange(element, "Height (padding)", this.paddings, paddings); heightChanged = true; } if (!widthChanged && hasWidthChanged(this.paddings, paddings)) { debugSizeChange(element, "Width (padding)", this.paddings, paddings); widthChanged = true; } this.paddings = paddings; Profiler.leave("Measure paddings"); Profiler.enter("Measure margins"); int[] margins = computedStyle.getMargin(); if (!heightChanged && hasHeightChanged(this.margins, margins)) { debugSizeChange(element, "Height (margins)", this.margins, margins); heightChanged = true; } if (!widthChanged && hasWidthChanged(this.margins, margins)) { debugSizeChange(element, "Width 
(margins)", this.margins, margins); widthChanged = true; } this.margins = margins; Profiler.leave("Measure margins"); Profiler.enter("Measure borders"); int[] borders = computedStyle.getBorder(); if (!heightChanged && hasHeightChanged(this.borders, borders)) { debugSizeChange(element, "Height (borders)", this.borders, borders); heightChanged = true; } if (!widthChanged && hasWidthChanged(this.borders, borders)) { debugSizeChange(element, "Width (borders)", this.borders, borders); widthChanged = true; } this.borders = borders; Profiler.leave("Measure borders"); Profiler.enter("Measure height"); double requiredHeight; if (thoroughSizeCheck) { requiredHeight = computedStyle.getHeightIncludingBorderPadding(); if (Double.isNaN(requiredHeight)) { requiredHeight = 0; } } else { requiredHeight = WidgetUtil.getRequiredHeightDouble(element); } double outerHeight = requiredHeight + sumHeights(margins); double oldHeight = height; if (setOuterHeight(outerHeight)) { debugSizeChange(element, "Height (outer)", oldHeight, height); heightChanged = true; } Profiler.leave("Measure height"); Profiler.enter("Measure width"); double requiredWidth; if (thoroughSizeCheck) { requiredWidth = computedStyle.getWidthIncludingBorderPadding(); if (Double.isNaN(requiredWidth)) { requiredWidth = 0; } } else { requiredWidth = WidgetUtil.getRequiredWidthDouble(element); } double outerWidth = requiredWidth + sumWidths(margins); double oldWidth = width; if (setOuterWidth(outerWidth)) { debugSizeChange(element, "Width (outer)", oldWidth, width); widthChanged = true; } Profiler.leave("Measure width"); Profiler.leave("MeasuredSize.measure"); return new MeasureResult(widthChanged, heightChanged); }
3.68
flink_PrioritizedDeque_peek
/** * Returns the first priority element or non-priority element if the former does not exist. * * @return the first element or null. */ @Nullable public T peek() { return deque.peek(); }
3.68
streampipes_InfluxStore_createDatabase
/** * Creates a new database with the given name * * @param dbName The name of the database which should be created */ private void createDatabase(String dbName) throws SpRuntimeException { if (!dbName.matches("^[a-zA-Z_]\\w*$")) { throw new SpRuntimeException( "Database name '" + dbName + "' not allowed. Allowed names: ^[a-zA-Z_][a-zA-Z0-9_]*$"); } influxDb.query(new Query("CREATE DATABASE \"" + dbName + "\"", "")); }
3.68
hbase_RegionPlan_compareTo
/** * Compare the region info. * @param other region plan you are comparing against */ @Override public int compareTo(RegionPlan other) { return compareTo(this, other); }
3.68
starts_Writer_writeGraph
/** * Write the graph to file, together with any new edges (if any) that we get * from parsing classes that changed. * * @param graph The graph that we want to write * @param artifactsDir The directory in which we are writing STARTS artifacts * @param print Write graph to file if true * @param graphFile The file in which to optionally write the graph */ public static void writeGraph(DirectedGraph<String> graph, String artifactsDir, boolean print, String graphFile) { if (print) { String outFilename = artifactsDir + File.separator + graphFile; try (BufferedWriter writer = getWriter(outFilename)) { if (graph == null) { writer.write(EMPTY); return; } // write all the edges in the graph for (Edge<String> edge : graph.getEdges()) { writer.write(edge.getSource() + WHITE_SPACE + edge.getDestination() + System.lineSeparator()); } } catch (IOException ioe) { ioe.printStackTrace(); } } }
3.68
AreaShop_RegionSign_runSignCommands
/** * Run commands when a player clicks a sign. * @param clicker The player that clicked the sign * @param clickType The type of clicking * @return true if the commands ran successfully, false if any of them failed */ public boolean runSignCommands(Player clicker, GeneralRegion.ClickType clickType) { ConfigurationSection signConfig = getProfile(); if(signConfig == null) { return false; } ConfigurationSection stateConfig = signConfig.getConfigurationSection(getRegion().getState().getValue().toLowerCase()); // Run player commands if specified List<String> playerCommands = new ArrayList<>(); for(String command : stateConfig.getStringList(clickType.getValue() + "Player")) { // TODO move variable checking code to InteractiveMessenger? playerCommands.add(command.replace(Message.VARIABLE_START + AreaShop.tagClicker + Message.VARIABLE_END, clicker.getName())); } getRegion().runCommands(clicker, playerCommands); // Run console commands if specified List<String> consoleCommands = new ArrayList<>(); for(String command : stateConfig.getStringList(clickType.getValue() + "Console")) { consoleCommands.add(command.replace(Message.VARIABLE_START + AreaShop.tagClicker + Message.VARIABLE_END, clicker.getName())); } getRegion().runCommands(Bukkit.getConsoleSender(), consoleCommands); return !playerCommands.isEmpty() || !consoleCommands.isEmpty(); }
3.68
pulsar_DLOutputStream_closeAsync
/** * Every package will be a stream. So we need mark the stream as EndOfStream when the stream * write done. * * @return */ CompletableFuture<Void> closeAsync() { return writer.markEndOfStream() .thenCompose(ignore -> writer.asyncClose()) .thenCompose(ignore -> distributedLogManager.asyncClose()); }
3.68
hbase_MultipleColumnPrefixFilter_parseFrom
/** * Parse a serialized representation of {@link MultipleColumnPrefixFilter} * @param pbBytes A pb serialized {@link MultipleColumnPrefixFilter} instance * @return An instance of {@link MultipleColumnPrefixFilter} made from <code>bytes</code> * @throws DeserializationException if an error occurred * @see #toByteArray */ public static MultipleColumnPrefixFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.MultipleColumnPrefixFilter proto; try { proto = FilterProtos.MultipleColumnPrefixFilter.parseFrom(pbBytes); } catch (InvalidProtocolBufferException e) { throw new DeserializationException(e); } int numPrefixes = proto.getSortedPrefixesCount(); byte[][] prefixes = new byte[numPrefixes][]; for (int i = 0; i < numPrefixes; ++i) { prefixes[i] = proto.getSortedPrefixes(i).toByteArray(); } return new MultipleColumnPrefixFilter(prefixes); }
3.68
AreaShop_ImportJob_minutesToString
/** * Convert minutes to a human-readable string. * @param minutes Value to convert * @return String that represents the same length of time in a readable format, like "1 day", "5 minutes", "3 months" */ private String minutesToString(long minutes) { // If the specified number of minutes can map nicely to a higher unit, use that one String resultUnit = "minute"; long resultValue = minutes; for(TimeUnit unit : timeUnitLookup) { long result = minutes / unit.minutes; if(resultValue * unit.minutes == minutes) { resultUnit = unit.identifier; resultValue = result; break; } } return resultValue + " " + resultUnit + (resultValue == 1 ? "" : "s"); }
3.68
hadoop_AuxServiceConfiguration_toIndentedString
/** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); }
3.68
hbase_ParseFilter_parseComparator
/** * Splits a column in comparatorType:comparatorValue form into separate byte arrays * <p> * @param comparator the comparator * @return the parsed arguments of the comparator as a 2D byte array */ public static byte[][] parseComparator(byte[] comparator) { final int index = Bytes.searchDelimiterIndex(comparator, 0, comparator.length, ParseConstants.COLON); if (index == -1) { throw new IllegalArgumentException("Incorrect comparator"); } byte[][] result = new byte[2][0]; result[0] = new byte[index]; System.arraycopy(comparator, 0, result[0], 0, index); final int len = comparator.length - (index + 1); result[1] = new byte[len]; System.arraycopy(comparator, index + 1, result[1], 0, len); return result; }
3.68
flink_LogicalTypeJsonDeserializer_deserializeInternal
/** * Deserialize json according to the original type root. It's reverse operation of {@code * SerializerWIP#serializeinternal}. */ private LogicalType deserializeInternal(JsonNode logicalTypeNode) { LogicalTypeRoot typeRoot = LogicalTypeRoot.valueOf(logicalTypeNode.get(FIELD_NAME_TYPE_NAME).asText()); // the NullType's Json doesn't have other field, so return in advance if (typeRoot.equals(LogicalTypeRoot.NULL)) { return new NullType(); } boolean isNullable = logicalTypeNode.get(FIELD_NAME_NULLABLE).asBoolean(); switch (typeRoot) { case BOOLEAN: return new BooleanType(isNullable); case TINYINT: return new TinyIntType(isNullable); case SMALLINT: return new SmallIntType(isNullable); case INTEGER: return new IntType(isNullable); case BIGINT: return new BigIntType(isNullable); case FLOAT: return new FloatType(isNullable); case DOUBLE: return new DoubleType(isNullable); case DATE: return new DateType(isNullable); case CHAR: case VARCHAR: case BINARY: case VARBINARY: return deserializeLengthFieldType(typeRoot, logicalTypeNode).copy(isNullable); case DECIMAL: return new DecimalType( isNullable, logicalTypeNode.get(FIELD_NAME_PRECISION).asInt(), logicalTypeNode.get(FIELD_NAME_SCALE).asInt()); case TIME_WITHOUT_TIME_ZONE: case TIMESTAMP_WITHOUT_TIME_ZONE: case TIMESTAMP_WITH_TIME_ZONE: case TIMESTAMP_WITH_LOCAL_TIME_ZONE: return deserializeTimestamp(typeRoot, logicalTypeNode).copy(isNullable); case INTERVAL_DAY_TIME: case INTERVAL_YEAR_MONTH: return deserializeInterval(isNullable, typeRoot, logicalTypeNode); case MAP: return deserializeMap(logicalTypeNode).copy(isNullable); case ARRAY: case MULTISET: return deserializeCollection(typeRoot, logicalTypeNode).copy(isNullable); case ROW: return deserializeRow(logicalTypeNode).copy(isNullable); case RAW: return deserializeRaw(logicalTypeNode).copy(isNullable); default: throw new UnsupportedOperationException( String.format( "Unable to deserialize a logical type of type root '%s'. 
Please check the documentation for supported types.", typeRoot.name())); } }
3.68
shardingsphere-elasticjob_ConfigurationService_setUpJobConfiguration
/** * Set up job configuration. * * @param jobClassName job class name * @param jobConfig job configuration to be updated * @return accepted job configuration */ public JobConfiguration setUpJobConfiguration(final String jobClassName, final JobConfiguration jobConfig) { checkConflictJob(jobClassName, jobConfig); if (!jobNodeStorage.isJobNodeExisted(ConfigurationNode.ROOT) || jobConfig.isOverwrite()) { jobNodeStorage.replaceJobNode(ConfigurationNode.ROOT, YamlEngine.marshal(JobConfigurationPOJO.fromJobConfiguration(jobConfig))); jobNodeStorage.replaceJobRootNode(jobClassName); return jobConfig; } return load(false); }
3.68
hadoop_BaseService_destroy
/** * Destroy the services. This method is called once, when the * {@link Server} owning the service is being destroyed. * <p> * This method does a NOP. */ @Override public void destroy() { }
3.68
framework_TabSheetScrollOnTabClose_getTicketNumber
/* * (non-Javadoc) * * @see com.vaadin.tests.components.AbstractTestUI#getTicketNumber() */ @Override protected Integer getTicketNumber() { return 14348; }
3.68
flink_DataStream_shuffle
/** * Sets the partitioning of the {@link DataStream} so that the output elements are shuffled * uniformly randomly to the next operation. * * @return The DataStream with shuffle partitioning set. */ @PublicEvolving public DataStream<T> shuffle() { return setConnectionType(new ShufflePartitioner<T>()); }
3.68
framework_TooltipInfo_setErrorLevel
/** * Sets the error level. * * @param errorLevel * the error level to set * @since 8.2 */ public void setErrorLevel(ErrorLevel errorLevel) { this.errorLevel = errorLevel; }
3.68
dubbo_Stack_remove
/** * remove. * * @param index * @return element */ public E remove(int index) { if (index >= mSize || index + mSize < 0) { throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + mSize); } E ret = mElements.remove(index < 0 ? index + mSize : index); mSize--; return ret; }
3.68
hbase_Threads_sleep
/** * If interrupted, just prints out the interrupt on STDOUT, resets interrupt and returns * @param millis How long to sleep for in milliseconds. */ public static void sleep(long millis) { try { Thread.sleep(millis); } catch (InterruptedException e) { LOG.warn("sleep interrupted", e); Thread.currentThread().interrupt(); } }
3.68
framework_EditorHandler_failure
/** * Informs Grid that an error occurred while trying to process the * request. This method is a short-hand for calling {@link #failure()} * and {@link Editor#setEditorError(String, Collection)} * * @param errorMessage * and error message to show to the user, or * <code>null</code> to not show any message. * @param errorColumns * a collection of columns for which an error indicator * should be shown, or <code>null</code> if no columns should * be marked as erroneous. * * @see Editor#setEditorError(String, Collection) */ public default void failure(String errorMessage, Collection<Grid.Column<?, T>> errorColumns) { failure(); getGrid().getEditor().setEditorError(errorMessage, errorColumns); }
3.68
hadoop_LoadedManifestData_getEntrySequenceData
/** * Get the path to the entry sequence data file. * @return the path */ public Path getEntrySequenceData() { return entrySequenceData; }
3.68
flink_DateTimeUtils_ymdhms
/** Appends year-month-day and hour:minute:second to a buffer; assumes they are valid. */ private static StringBuilder ymdhms( StringBuilder b, int year, int month, int day, int h, int m, int s) { ymd(b, year, month, day); b.append(' '); hms(b, h, m, s); return b; }
3.68
MagicPlugin_Targeting_getPreviousBlock
/** * Returns the previous block along the line of vision * * @return The block */ public Block getPreviousBlock() { return previousBlock; }
3.68
querydsl_NumberExpression_ltAll
/** * Create a {@code this < all right} expression * * @param right rhs * @return this &lt; all right */ public BooleanExpression ltAll(CollectionExpression<?, ? super T> right) { return lt(ExpressionUtils.<T> all(right)); }
3.68
hbase_User_getName
/** * Returns the full user name. For Kerberos principals this will include the host and realm * portions of the principal name. * @return User full name. */ public String getName() { return ugi.getUserName(); }
3.68
querydsl_GuavaGroupByBuilder_asTable
/** * Get the results as sorted table * * @param column column expression * @param expression value expression * @param <C> Column type * @param <V> Value type * @return new result transformer */ public <C, V> ResultTransformer<Table<K, C, V>> asTable(final Expression<C> column, final Expression<V> expression) { final Expression<C> columnKeyLookup = getLookup(column); final Expression<V> lookup = getLookup(expression); return new GroupByTable<K, C, V, Table<K, C, V>>(key, column, expression) { @Override protected Table<K, C, V> transform(Table<K, ?, Group> groups) { Table<K, C, V> results = HashBasedTable.create(); for (Cell<K, ?, Group> cell : groups.cellSet()) { K rowKey = cell.getRowKey(); C columnKey = cell.getValue().getOne(columnKeyLookup); V value = cell.getValue().getOne(lookup); results.put(rowKey, columnKey, value); } return results; } }; }
3.68
hbase_RegionPlacementMaintainer_printAssignmentPlan
/** * Print the assignment plan to the system output stream */ public static void printAssignmentPlan(FavoredNodesPlan plan) { if (plan == null) return; LOG.info("========== Start to print the assignment plan ================"); // sort the map based on region info Map<String, List<ServerName>> assignmentMap = new TreeMap<>(plan.getAssignmentMap()); for (Map.Entry<String, List<ServerName>> entry : assignmentMap.entrySet()) { String serverList = FavoredNodeAssignmentHelper.getFavoredNodesAsString(entry.getValue()); String regionName = entry.getKey(); LOG.info("Region: " + regionName); LOG.info("Its favored nodes: " + serverList); } LOG.info("========== Finish to print the assignment plan ================"); }
3.68
AreaShop_RegionGroup_removeWorld
/** * Remove a member from the group. * @param world World to remove * @return true if the region was in the group before, otherwise false */ public boolean removeWorld(String world) { if(worlds.remove(world)) { setSetting("regionsFromWorlds", new ArrayList<>(worlds)); saveRequired(); autoDirty(); return true; } return false; }
3.68
hudi_FileSystemViewManager_createRemoteFileSystemView
/** * Create a remote file System view for a table. * * @param conf Hadoop Configuration * @param viewConf View Storage Configuration * @param metaClient Hoodie Table MetaClient for the table. * @return */ private static RemoteHoodieTableFileSystemView createRemoteFileSystemView(SerializableConfiguration conf, FileSystemViewStorageConfig viewConf, HoodieTableMetaClient metaClient) { LOG.info("Creating remote view for basePath " + metaClient.getBasePath() + ". Server=" + viewConf.getRemoteViewServerHost() + ":" + viewConf.getRemoteViewServerPort() + ", Timeout=" + viewConf.getRemoteTimelineClientTimeoutSecs()); return new RemoteHoodieTableFileSystemView(metaClient, viewConf); }
3.68
hudi_QuickstartUtils_generateDeletes
/** * Generates delete records for the passed in rows. * * @param rows List of {@link Row}s for which delete record need to be generated * @return list of hoodie records to delete */ public List<String> generateDeletes(List<Row> rows) { // if row.length() == 2, then the record contains "uuid" and "partitionpath" fields, otherwise, // another field "ts" is available return rows.stream().map(row -> row.length() == 2 ? convertToString(row.getAs("uuid"), row.getAs("partitionpath"), null) : convertToString(row.getAs("uuid"), row.getAs("partitionpath"), row.getAs("ts")) ).filter(os -> os.isPresent()).map(os -> os.get()) .collect(Collectors.toList()); }
3.68
dubbo_ConfigValidationUtils_checkMock
/**
 * Legitimacy check and setup of local simulated operations. The operations can be a string with
 * Simple operation or a classname whose {@link Class} implements a particular function.
 *
 * @param interfaceClass for provider side, it is the {@link Class} of the service that will be
 *                       exported; for consumer side, it is the {@link Class} of the remote
 *                       service interface that will be referenced
 * @param config the interface config whose {@code mock} attribute is validated
 * @throws IllegalStateException if the configured mock return/throw value cannot be parsed
 */
public static void checkMock(Class<?> interfaceClass, AbstractInterfaceConfig config) {
    String mock = config.getMock();
    if (ConfigUtils.isEmpty(mock)) {
        // Nothing configured; nothing to validate.
        return;
    }

    String normalizedMock = MockInvoker.normalizeMock(mock);
    if (normalizedMock.startsWith(RETURN_PREFIX)) {
        normalizedMock = normalizedMock.substring(RETURN_PREFIX.length()).trim();
        try {
            // Check whether the mock value is legal, if it is illegal, throw exception
            MockInvoker.parseMockValue(normalizedMock);
        } catch (Exception e) {
            throw new IllegalStateException(
                    "Illegal mock return in <dubbo:service/reference ... " + "mock=\"" + mock + "\" />");
        }
    } else if (normalizedMock.startsWith(THROW_PREFIX)) {
        normalizedMock = normalizedMock.substring(THROW_PREFIX.length()).trim();
        if (ConfigUtils.isNotEmpty(normalizedMock)) {
            try {
                // Check whether the mock value is legal
                MockInvoker.getThrowable(normalizedMock);
            } catch (Exception e) {
                throw new IllegalStateException(
                        "Illegal mock throw in <dubbo:service/reference ... " + "mock=\"" + mock + "\" />");
            }
        }
    } else {
        // Check whether the mock class is a implementation of the interfaceClass, and if it has a default
        // constructor
        MockInvoker.getMockObject(config.getScopeModel().getExtensionDirector(), normalizedMock, interfaceClass);
    }
}
3.68
flink_SqlFunctionUtils_cot
/** SQL <code>COT</code> operator applied to double values: cot(x) = 1 / tan(x). */
public static double cot(double b0) {
    double tangent = Math.tan(b0);
    return 1.0d / tangent;
}
3.68
AreaShop_FileManager_loadFiles
/**
 * Load all files from disk.
 * config.yml and default.yml are always loaded in the current tick; region and group files can
 * be deferred to a later tick.
 * @param thisTick Load files in the current tick or a tick later
 * @return true if the files loaded in this tick are loaded correctly, otherwise false
 */
public boolean loadFiles(boolean thisTick) {
	// Load config.yml + add defaults from .jar
	boolean result = loadConfigFile();
	// Load default.yml + add defaults from .jar
	result &= loadDefaultFile();
	// Convert old formats to the latest (object saving to .yml saving)
	preUpdateFiles();
	if(thisTick) {
		// Load region files (regions folder)
		loadRegionFiles();
		// Convert old formats to the latest (changes in .yml saving format)
		postUpdateFiles();
		// Load groups.yml
		result &= loadGroupsFile();
	} else {
		Do.sync(() -> {
			// Load region files (regions folder)
			loadRegionFiles();
			// Convert old formats to the latest (changes in .yml saving format)
			postUpdateFiles();
			// Load groups.yml
			// NOTE(review): this runs after the method has already returned, so a failed
			// groups load cannot affect the return value in this branch — confirm intended.
			loadGroupsFile();
		});
	}
	return result;
}
3.68
framework_AbstractDateField_setDefaultValue
/**
 * Sets the default value for the field. The default value is the starting
 * point for the date field when nothing has been selected yet. If no
 * default value is set, current date/time is used.
 *
 * @param defaultValue
 *            the default value, may be {@code null}
 * @since 8.1.2
 */
public void setDefaultValue(T defaultValue) {
    this.defaultValue = defaultValue;
    // Propagate the change so the client-side resolutions reflect the new default.
    updateResolutions();
}
3.68
morf_OracleMetaDataProvider_dataTypeForColumn
/**
 * Get our {@link DataType} from the Oracle type. This serves the same purpose
 * as {@link DatabaseMetaDataProvider#dataTypeFromSqlType(int, String, int)} but is
 * entirely Oracle specific.
 *
 * @param columnName The name of the column (used in error messages only).
 * @param dataTypeName The Oracle type name.
 * @param commentType the type of the column stored in a comment.
 * @return The DataType.
 * @throws UnexpectedDataTypeException If data type cannot be parsed.
 */
private static DataType dataTypeForColumn(String columnName, String dataTypeName, String commentType) {
  /*
   * Oracle stores all numeric types as 'NUMBER', so we have no easy way of
   * identifying fields such as 'int' or 'big int'. As such, the actual data
   * type of the column is stored in a comment against that column. Hence, if
   * we're given a type from a comment then try and use that, only falling
   * back to the matching below if we don't have/find one.
   *
   * It's not possible to reverse engineer the type from the database because
   * of things such as foreign keys: although the ID column is a 'big int', the
   * actual value on a column that links to this ID will be stored as a decimal
   * in most cases.
   */
  if (StringUtils.isNotEmpty(commentType)) {
    // Prefer the exact DataType named in the column comment, when present.
    for (DataType dataType : DataType.values()) {
      if (dataType.toString().equals(commentType)) {
        return dataType;
      }
    }
  }

  // Fall back to a direct mapping from the Oracle type name.
  if ("NVARCHAR2".equals(dataTypeName) || "VARCHAR2".equals(dataTypeName)) {
    return DataType.STRING;
  } else if ("NUMBER".equals(dataTypeName)) {
    return DataType.DECIMAL;
  } else if ("BLOB".equals(dataTypeName)) {
    return DataType.BLOB;
  } else if ("NCLOB".equals(dataTypeName)) {
    return DataType.CLOB;
  } else if ("DATE".equals(dataTypeName)) {
    return DataType.DATE;
  } else {
    throw new DatabaseMetaDataProvider.UnexpectedDataTypeException("Unsupported data type [" + dataTypeName + "]" + " in [" + columnName + "]");
  }
}
3.68
querydsl_Expressions_dslTemplate
/**
 * Create a new Template expression.
 *
 * @param cl type of expression
 * @param template template
 * @param args template parameters
 * @param <T> expression type
 * @return template expression
 */
public static <T> DslTemplate<T> dslTemplate(Class<? extends T> cl, Template template, List<?> args) {
    return new DslTemplate<T>(cl, template, args);
}
3.68
flink_SplitFetcherManager_close
/**
 * Close the split fetcher manager: shuts down all fetchers and waits for the executor to
 * terminate, logging a warning (rather than failing) on timeout.
 *
 * @param timeoutMs the max time in milliseconds to wait.
 * @throws Exception when failed to close the split fetcher manager.
 */
public synchronized void close(long timeoutMs) throws Exception {
    closed = true;
    fetchers.values().forEach(SplitFetcher::shutdown);
    executors.shutdown();
    if (!executors.awaitTermination(timeoutMs, TimeUnit.MILLISECONDS)) {
        // Best-effort close: surviving fetchers are only reported, not force-killed here.
        LOG.warn(
                "Failed to close the source reader in {} ms. There are still {} split fetchers running",
                timeoutMs,
                fetchers.size());
    }
}
3.68
flink_ChainedStateHandle_get
/**
 * Get the state handle for a single operator in the operator chain by its index.
 *
 * @param index the index in the operator chain
 * @return state handle to the operator at the given position in the operator chain; can be
 *     null.
 */
public T get(int index) {
    return operatorStateHandles.get(index);
}
3.68
hadoop_OBSFileSystem_getListParallelFactor
/**
 * Return the configured list parallel factor (degree of parallelism used for listing).
 *
 * @return the list parallel factor
 */
int getListParallelFactor() {
    return listParallelFactor;
}
3.68
morf_AbstractSqlDialectTest_testUseIndexOnSubquery
/**
 * Check that we don't allow the use of the use index hint on a subquery:
 * converting such a statement must throw {@link IllegalArgumentException}.
 */
@Test(expected = IllegalArgumentException.class)
public void testUseIndexOnSubquery() {
  testDialect.convertStatementToSQL(
    select().from(select().from("Foo").useIndex(tableRef("Foo"), "Foo_1"))
  );
}
3.68
framework_DesignFormatter_getRegisteredClasses
/**
 * Returns a set of classes that have a converter registered. This is <b>not
 * the same</b> as the list of supported classes - subclasses of classes in
 * this set are also supported.
 *
 * @return An unmodifiable set of classes that have a converter registered.
 */
protected Set<Class<?>> getRegisteredClasses() {
    return Collections.unmodifiableSet(converterMap.keySet());
}
3.68
hadoop_BondedS3AStatisticsContext_newCommitterStatistics
/**
 * Create a new instance of the committer statistics, delegating to the bound
 * instrumentation.
 *
 * @return a new committer statistics instance
 */
@Override
public CommitterStatistics newCommitterStatistics() {
    return getInstrumentation().newCommitterStatistics();
}
3.68
hbase_QuotaCache_getRegionServerQuotaLimiter
/**
 * Returns the limiter associated to the specified region server, looked up in (and cached by)
 * the region-server quota cache.
 * @param regionServer the region server to limit
 * @return the limiter associated to the specified region server
 */
public QuotaLimiter getRegionServerQuotaLimiter(final String regionServer) {
  return getQuotaState(this.regionServerQuotaCache, regionServer).getGlobalLimiter();
}
3.68
hbase_MetricsSource_incrSizeOfLogQueue
/**
 * Increment the size of the log queue by one, on both the per-source and the global
 * metrics sources.
 */
public void incrSizeOfLogQueue() {
  singleSourceSource.incrSizeOfLogQueue(1);
  globalSourceSource.incrSizeOfLogQueue(1);
}
3.68
rocketmq-connect_LogReporter_report
/**
 * Log the error context and record the occurrence in the error metrics group.
 *
 * @param context the processing context.
 */
@Override
public void report(ProcessingContext context) {
    errorMetricsGroup.recordErrorLogged();
    log.error(message(context), context.error());
}
3.68
hadoop_PlacementConstraints_build
/**
 * Creates a {@link PlacementConstraint} given a constraint expression.
 *
 * @param constraintExpr the constraint expression to wrap
 * @return the placement constraint built from the expression
 */
public static PlacementConstraint build(AbstractConstraint constraintExpr) {
  return constraintExpr.build();
}
3.68
hadoop_Validate_checkIntegerMultiple
/**
 * Validates that the first value is an integer multiple of the second value.
 * @param value1 the first value to check.
 * @param value1Name the name of the first argument, used in the error message.
 * @param value2 the second value to check.
 * @param value2Name the name of the second argument, used in the error message.
 */
public static void checkIntegerMultiple(
    long value1,
    String value1Name,
    long value2,
    String value2Name) {
  boolean isMultiple = (value1 % value2) == 0;
  checkArgument(
      isMultiple,
      "'%s' (%s) must be an integer multiple of '%s' (%s).",
      value1Name,
      value1,
      value2Name,
      value2);
}
3.68
flink_LocatableInputSplit_getHostnames
/**
 * Returns the names of the hosts storing the data this input split refers to.
 *
 * <p>NOTE(review): the internal array is returned directly, so callers can mutate it —
 * confirm whether a defensive copy is expected here.
 *
 * @return the names of the hosts storing the data this input split refers to
 */
public String[] getHostnames() {
    return this.hostnames;
}
3.68
hbase_HFileWriterImpl_appendFileInfo
/**
 * Add to the file info. All added key/value pairs can be obtained using
 * {@link HFile.Reader#getHFileInfo()}.
 * @param k Key
 * @param v Value
 * @throws IOException in case the key or the value are invalid
 */
@Override
public void appendFileInfo(final byte[] k, final byte[] v) throws IOException {
  // The third argument enables validation of the key/value pair on append.
  fileInfo.append(k, v, true);
}
3.68
dubbo_DeadlineFuture_newFuture
/**
 * Create a new DeadlineFuture: 1. construct the future 2. attach the executor used for
 * timeout checking and callbacks.
 *
 * @param serviceName name of the service being invoked
 * @param methodName name of the invoked method
 * @param address remote address of the invocation
 * @param timeout timeout in milliseconds
 * @param executor executor used to run timeout checks and completion callbacks
 * @return a new DeadlineFuture
 */
public static DeadlineFuture newFuture(
        String serviceName, String methodName, String address, int timeout, ExecutorService executor) {
    final DeadlineFuture future = new DeadlineFuture(serviceName, methodName, address, timeout);
    future.setExecutor(executor);
    return future;
}
3.68
hadoop_WrappedIOStatistics_getWrapped
/**
 * Get at the wrapped inner statistics instance.
 * @return the wrapped value
 */
protected IOStatistics getWrapped() {
    return wrapped;
}
3.68
framework_AbsoluteLayoutResizeComponents_addStartWithDefinedWidthAbsoluteLayout
/**
 * Build test layout for #8257: an AbsoluteLayout that starts with a defined
 * width of 250px, plus a button that expands it.
 *
 * @param layout the parent layout the test components are added to
 */
private void addStartWithDefinedWidthAbsoluteLayout(AbsoluteLayout layout) {
    AbsoluteLayout layoutExpanding = new AbsoluteLayout();
    layoutExpanding.setWidth("250px");
    layoutExpanding.addComponent(
            new Panel(new CssLayout(new Label("Start Width 250px"))));
    layoutExpanding.setId("absolute-expanding");
    layout.addComponent(layoutExpanding, "right:0;top:200px;");
    // Fixed position-string typo: "10x" is not a valid CSS length, must be "10px".
    layout.addComponent(expandButton(layoutExpanding),
            "left: 10px; top: 250px;");
}
3.68
hmily_MetricsReporter_gaugeIncrement
/**
 * Increment the named gauge by one, with no label values.
 *
 * @param name gauge name
 */
public static void gaugeIncrement(final String name) {
    gaugeIncrement(name, null);
}
3.68
framework_VTooltip_setCloseTimeout
/**
 * Sets the time (in ms) the tooltip should be displayed after an event that
 * will cause it to be closed (e.g. mouse click outside the component, key
 * down).
 *
 * @param closeTimeout
 *            The close timeout (in ms)
 */
public void setCloseTimeout(int closeTimeout) {
    this.closeTimeout = closeTimeout;
}
3.68
flink_MemoryUtils_wrapUnsafeMemoryWithByteBuffer
/**
 * Wraps the unsafe native memory with a {@link ByteBuffer}.
 *
 * <p>No copy is made: the returned buffer is a direct view onto the given native memory
 * region. The caller remains responsible for the lifetime of that memory.
 *
 * @param address address of the unsafe memory to wrap
 * @param size size of the unsafe memory to wrap
 * @return a {@link ByteBuffer} which is a view of the given unsafe memory
 */
static ByteBuffer wrapUnsafeMemoryWithByteBuffer(long address, int size) {
    //noinspection OverlyBroadCatchBlock
    try {
        // Allocate a DirectByteBuffer instance without running its constructor, then patch
        // its address and capacity fields directly so it points at the native region.
        ByteBuffer buffer = (ByteBuffer) UNSAFE.allocateInstance(DIRECT_BYTE_BUFFER_CLASS);
        UNSAFE.putLong(buffer, BUFFER_ADDRESS_FIELD_OFFSET, address);
        UNSAFE.putInt(buffer, BUFFER_CAPACITY_FIELD_OFFSET, size);
        // Reset position/limit/mark so the buffer spans the full region.
        buffer.clear();
        return buffer;
    } catch (Throwable t) {
        throw new Error("Failed to wrap unsafe off-heap memory with ByteBuffer", t);
    }
}
3.68
hbase_Result_rawCells
/**
 * Return the array of Cells backing this Result instance. The array is sorted from smallest -&gt;
 * largest using the {@link CellComparator}. The array only contains what your Get or Scan
 * specifies and no more. For example if you request column "A" 1 version you will have at most 1
 * Cell in the array. If you request column "A" with 2 version you will have at most 2 Cells, with
 * the first one being the newer timestamp and the second being the older timestamp (this is the
 * sort order defined by {@link CellComparator}). If columns don't exist, they won't be present in
 * the result. Therefore if you ask for 1 version all columns, it is safe to iterate over this
 * array and expect to see 1 Cell for each column and no more. This API is faster than using
 * getFamilyMap() and getMap().
 * @return array of Cells; can be null if nothing in the result
 */
public Cell[] rawCells() {
  return cells;
}
3.68
flink_CompileUtils_compileExpression
/**
 * Compiles an expression code to a janino {@link ExpressionEvaluator}, using a cache keyed
 * by (code, argument names, argument classes, return class) to avoid recompilation.
 *
 * @param code the expression code
 * @param argumentNames the expression argument names
 * @param argumentClasses the expression argument classes
 * @param returnClass the return type of the expression
 * @return the compiled expression evaluator
 */
public static ExpressionEvaluator compileExpression(
        String code,
        List<String> argumentNames,
        List<Class<?>> argumentClasses,
        Class<?> returnClass) {
    try {
        ExpressionKey key = new ExpressionKey(code, argumentNames, argumentClasses, returnClass);
        // Loader runs only on cache miss; cache hit returns the compiled evaluator directly.
        return COMPILED_EXPRESSION_CACHE.get(
                key,
                () -> {
                    ExpressionEvaluator expressionEvaluator = new ExpressionEvaluator();
                    // Input args
                    expressionEvaluator.setParameters(
                            argumentNames.toArray(new String[0]),
                            argumentClasses.toArray(new Class[0]));
                    // Result type
                    expressionEvaluator.setExpressionType(returnClass);
                    try {
                        // Compile
                        expressionEvaluator.cook(code);
                    } catch (CompileException e) {
                        throw new InvalidProgramException(
                                "Table program cannot be compiled. This is a bug. Please file an issue.\nExpression: "
                                        + code,
                                e);
                    }
                    return expressionEvaluator;
                });
    } catch (Exception e) {
        throw new FlinkRuntimeException(e.getMessage(), e);
    }
}
3.68
hbase_ParseFilter_getSupportedFilters
/**
 * Return the set of filter names supported by the Filter Language.
 *
 * @return the names of all registered filters
 */
public Set<String> getSupportedFilters() {
  return filterHashMap.keySet();
}
3.68
cron-utils_StringUtils_isEmpty
/**
 * <p>Checks if a CharSequence is empty ("") or null.</p>
 *
 * <pre>
 * StringUtils.isEmpty(null)      = true
 * StringUtils.isEmpty("")        = true
 * StringUtils.isEmpty(" ")       = false
 * StringUtils.isEmpty("bob")     = false
 * StringUtils.isEmpty("  bob  ") = false
 * </pre>
 *
 * <p>NOTE: This method changed in Lang version 2.0.
 * It no longer trims the CharSequence.
 * That functionality is available in isBlank().</p>
 *
 * @param cs the CharSequence to check, may be null
 * @return {@code true} if the CharSequence is empty or null
 * @since 3.0 Changed signature from isEmpty(String) to isEmpty(CharSequence)
 */
public static boolean isEmpty(final CharSequence cs) {
    if (cs == null) {
        return true;
    }
    return cs.length() == 0;
}
3.68
flink_NFAStateNameHandler_getOriginalNameFromInternal
/**
 * Recovers the original, user-specified state name from an internal one; this is the
 * reverse of {@link #getUniqueInternalName(String)}.
 *
 * @param internalName The name to be decoded.
 * @return The original, user-specified name for the state.
 */
public static String getOriginalNameFromInternal(String internalName) {
    Preconditions.checkNotNull(internalName);
    // The original name is everything before the first delimiter.
    String[] parts = internalName.split(STATE_NAME_DELIM);
    return parts[0];
}
3.68
hibernate-validator_ConstraintViolationImpl_getExpressionVariables
/**
 * Returns the expression variables added via
 * {@link HibernateConstraintValidatorContext#addExpressionVariable(String, Object)}.
 *
 * @return the map of expression variable names to values
 */
public Map<String, Object> getExpressionVariables() {
	return expressionVariables;
}
3.68
morf_H2Dialect_getSqlForRowNumber
/**
 * Returns the H2 SQL fragment producing a row number over the whole result set.
 *
 * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForRowNumber()
 */
@Override
protected String getSqlForRowNumber() {
  return "ROW_NUMBER() OVER()";
}
3.68
hbase_HFileLink_isHFileLink
/**
 * Checks whether a file name is in the HFileLink name format.
 *
 * @param fileName File name to check.
 * @return True if the path is a HFileLink.
 */
public static boolean isHFileLink(String fileName) {
  Matcher m = LINK_NAME_PATTERN.matcher(fileName);
  if (!m.matches()) {
    return false;
  }
  // A valid link must capture all three name components (groups 2-4).
  return m.groupCount() > 2 && m.group(4) != null && m.group(3) != null && m.group(2) != null;
}
3.68
cron-utils_FieldParser_intToInt
/**
 * Maps integer values to another integer equivalence. Always consider mapping higher integers
 * to lower ones. Ex.: if 0 and 7 mean the same, map 7 to 0.
 *
 * @param exp - integer to be mapped
 * @return the mapped integer, or {@code exp} itself when no mapping exists
 */
@VisibleForTesting
protected int intToInt(final Integer exp) {
    final Integer mapped = fieldConstraints.getIntMappingValue(exp);
    return mapped == null ? exp : mapped;
}
3.68
hbase_BackupManifest_canCoverImage
/** * Check whether backup image set could cover a backup image or not. * @param fullImages The backup image set * @param image The target backup image * @return true if fullImages can cover image, otherwise false */ public static boolean canCoverImage(ArrayList<BackupImage> fullImages, BackupImage image) { // fullImages can cover image only when the following conditions are satisfied: // - each image of fullImages must not be an incremental image; // - each image of fullImages must be taken after image has been taken; // - sum table set of fullImages must cover the table set of image. for (BackupImage image1 : fullImages) { if (image1.getType() == BackupType.INCREMENTAL) { return false; } if (image1.getStartTs() < image.getStartTs()) { return false; } } ArrayList<String> image1TableList = new ArrayList<>(); for (BackupImage image1 : fullImages) { List<TableName> tableList = image1.getTableNames(); for (TableName table : tableList) { image1TableList.add(table.getNameAsString()); } } ArrayList<String> image2TableList = new ArrayList<>(); List<TableName> tableList = image.getTableNames(); for (TableName table : tableList) { image2TableList.add(table.getNameAsString()); } for (int i = 0; i < image2TableList.size(); i++) { if (image1TableList.contains(image2TableList.get(i)) == false) { return false; } } LOG.debug("Full image set can cover image " + image.getBackupId()); return true; }
3.68
dubbo_RpcContextAttachment_setAttachment
/**
 * Set an attachment on this context.
 *
 * @param key attachment key
 * @param value attachment value
 * @return this context, for chaining
 */
@Override
public RpcContextAttachment setAttachment(String key, String value) {
    // Delegates to the Object overload; the cast selects that overload explicitly.
    return setObjectAttachment(key, (Object) value);
}
3.68
hbase_CatalogJanitor_scanForReport
/** * Scan hbase:meta. * @return Return generated {@link CatalogJanitorReport} */ // will be override in tests. protected CatalogJanitorReport scanForReport() throws IOException { ReportMakingVisitor visitor = new ReportMakingVisitor(this.services); // Null tablename means scan all of meta. MetaTableAccessor.scanMetaForTableRegions(this.services.getConnection(), visitor, null); return visitor.getReport(); }
3.68
flink_UserDefinedFunctionHelper_isClassNameSerializable
/**
 * Returns whether a {@link UserDefinedFunction} can be easily serialized and identified by only
 * a fully qualified class name. It must have a default constructor and no serializable fields.
 *
 * <p>Other properties (such as checks for abstract classes) are validated at the entry points
 * of the API, see {@link #prepareInstance(ReadableConfig, UserDefinedFunction)}.
 *
 * @param function the function instance to inspect
 * @return true if the class name alone is sufficient to reconstruct the function
 */
public static boolean isClassNameSerializable(UserDefinedFunction function) {
    final Class<?> functionClass = function.getClass();
    if (!InstantiationUtil.hasPublicNullaryConstructor(functionClass)) {
        // function must be parameterized
        return false;
    }
    // Walk the hierarchy up to (but excluding) UserDefinedFunction, looking for state.
    Class<?> currentClass = functionClass;
    while (!currentClass.equals(UserDefinedFunction.class)) {
        for (Field field : currentClass.getDeclaredFields()) {
            if (!Modifier.isTransient(field.getModifiers())
                    && !Modifier.isStatic(field.getModifiers())) {
                // function seems to be stateful
                return false;
            }
        }
        currentClass = currentClass.getSuperclass();
    }
    return true;
}
3.68
hadoop_OBSInputStream_readFully
/**
 * Subclass {@code readFully()} operation which only seeks at the start of the
 * series of operations; seeking back at the end.
 *
 * <p>This is significantly higher performance if multiple read attempts
 * are needed to fetch the data, as it does not break the HTTP connection.
 *
 * <p>To maintain thread safety requirements, this operation is
 * synchronized for the duration of the sequence. {@inheritDoc}
 */
@Override
public void readFully(final long position, final byte[] buffer,
    final int offset, final int length) throws IOException {
  long startTime = System.currentTimeMillis();
  long threadId = Thread.currentThread().getId();
  checkNotClosed();
  validatePositionedReadArgs(position, buffer, offset, length);
  if (length == 0) {
    return;
  }
  int nread = 0;
  synchronized (this) {
    // Remember the current position so it can be restored after the read sequence.
    long oldPos = getPos();
    try {
      seek(position);
      // Loop until the requested number of bytes has been read; a short read is
      // retried, a negative return means premature EOF.
      while (nread < length) {
        int nbytes = read(buffer, offset + nread, length - nread);
        if (nbytes < 0) {
          throw new EOFException(
              FSExceptionMessages.EOF_IN_READ_FULLY);
        }
        nread += nbytes;
      }
    } finally {
      // Restore the original position, swallowing any seek failure.
      seekQuietly(oldPos);
    }
  }
  long endTime = System.currentTimeMillis();
  LOG.debug(
      "ReadFully uri:{}, contentLength:{}, destLen:{}, readLen:{}, "
          + "position:{}, thread:{}, timeUsedMilliSec:{}",
      uri, contentLength, length, nread, position, threadId,
      endTime - startTime);
}
3.68
hadoop_MutableGaugeInt_set
/**
 * Set the value of the metric and mark it as changed.
 * @param value to set
 */
public void set(int value) {
  this.value.set(value);
  setChanged();
}
3.68
hadoop_DiskBalancerWorkItem_getSecondsElapsed
/**
 * Gets the number of seconds elapsed from the start time.
 *
 * The reason why we have this is of time skews. The client's current time
 * may not match with the server time stamp, hence the elapsed second
 * cannot be computed from only startTime.
 *
 * @return seconds elapsed from start time.
 */
public long getSecondsElapsed() {
  return secondsElapsed;
}
3.68
hbase_Result_containsNonEmptyColumn
/**
 * Checks if the specified column contains a non-empty value (not a zero-length byte array).
 * @param family family name
 * @param foffset family offset
 * @param flength family length
 * @param qualifier column qualifier
 * @param qoffset qualifier offset
 * @param qlength qualifier length
 * @return whether or not a latest value exists and is not empty
 */
public boolean containsNonEmptyColumn(byte[] family, int foffset, int flength, byte[] qualifier,
  int qoffset, int qlength) {
  Cell kv = getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength);
  // Present AND with a value of non-zero length.
  return (kv != null) && (kv.getValueLength() > 0);
}
3.68
flink_JavaFieldPredicates_isFinal
/**
 * Match the final modifier of the {@link JavaField}.
 *
 * @return A {@link DescribedPredicate} returning true, if and only if the tested {@link
 *     JavaField} has the final modifier.
 */
public static DescribedPredicate<JavaField> isFinal() {
    return DescribedPredicate.describe(
            "final", field -> field.getModifiers().contains(JavaModifier.FINAL));
}
3.68
framework_Page_updateBrowserWindowSize
/**
 * For internal use only. Updates the internal state with the given values.
 * Does not resize the Page or browser window.
 *
 * @since 7.2
 *
 * @param width
 *            the new browser window width
 * @param height
 *            the new browser window height
 * @param fireEvents
 *            whether to fire {@link BrowserWindowResizeEvent} if the size
 *            changes
 */
public void updateBrowserWindowSize(int width, int height,
        boolean fireEvents) {
    boolean widthChanged = browserWindowWidth != width;
    boolean heightChanged = browserWindowHeight != height;
    if (widthChanged) {
        browserWindowWidth = width;
    }
    if (heightChanged) {
        browserWindowHeight = height;
    }
    // Only notify listeners when at least one dimension actually changed.
    if (fireEvents && (widthChanged || heightChanged)) {
        fireEvent(new BrowserWindowResizeEvent(this, browserWindowWidth,
                browserWindowHeight));
    }
}
3.68
flink_TSetClientInfoReq_findByThriftId
/** Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch (fieldId) { case 1: // SESSION_HANDLE return SESSION_HANDLE; case 2: // CONFIGURATION return CONFIGURATION; default: return null; } }
3.68
flink_DecimalDataUtils_sround
/**
 * SQL <code>ROUND</code> operator applied to {@link DecimalData} values.
 *
 * @param b0 the value to round
 * @param r the number of decimal places to round to (may be negative)
 * @return the rounded value with precision/scale derived from the input
 */
public static DecimalData sround(DecimalData b0, int r) {
    // Rounding to at least the current scale is a no-op.
    if (r >= b0.scale) {
        return b0;
    }
    // Shift the rounding position to the integer boundary, round half-up, shift back.
    BigDecimal b2 =
            b0.toBigDecimal()
                    .movePointRight(r)
                    .setScale(0, RoundingMode.HALF_UP)
                    .movePointLeft(r);
    int p = b0.precision;
    int s = b0.scale;
    if (r < 0) {
        // Rounding left of the decimal point: result has scale 0; precision capped at 38.
        return fromBigDecimal(b2, Math.min(38, 1 + p - s), 0);
    } else {
        // 0 <= r < s
        // Result keeps r fractional digits; +1 integer digit for a possible carry.
        return fromBigDecimal(b2, 1 + p - s + r, r);
    }
}
3.68
hudi_DataSourceUtils_createUserDefinedBulkInsertPartitionerWithRows
/**
 * Create a UserDefinedBulkInsertPartitionerRows class via reflection,
 * <br>
 * if the class name of UserDefinedBulkInsertPartitioner is configured through the HoodieWriteConfig.
 *
 * @param config write config supplying the partitioner class name
 * @return the instantiated partitioner, or {@code Option.empty()} when no class is configured
 * @throws HoodieException if the configured class cannot be loaded or instantiated
 * @see HoodieWriteConfig#getUserDefinedBulkInsertPartitionerClass()
 */
public static Option<BulkInsertPartitioner<Dataset<Row>>> createUserDefinedBulkInsertPartitionerWithRows(HoodieWriteConfig config)
    throws HoodieException {
  String bulkInsertPartitionerClass = config.getUserDefinedBulkInsertPartitionerClass();
  try {
    return StringUtils.isNullOrEmpty(bulkInsertPartitionerClass)
        ? Option.empty()
        : Option.of((BulkInsertPartitioner) ReflectionUtils.loadClass(bulkInsertPartitionerClass, config));
  } catch (Throwable e) {
    // Wrap any load/instantiation failure (including Errors) in a HoodieException.
    throw new HoodieException("Could not create UserDefinedBulkInsertPartitionerRows class " + bulkInsertPartitionerClass, e);
  }
}
3.68
hmily_AssertUtils_notNull
/**
 * Asserts that the given argument is not null.
 *
 * @param obj the object to check
 * @throws HmilyRuntimeException if {@code obj} is null
 */
public static void notNull(final Object obj) {
    if (obj == null) {
        throw new HmilyRuntimeException("argument invalid,Please check");
    }
}
3.68
flink_MessageSerializer_writeHeader
/**
 * Helper for serializing the header: writes the protocol version followed by the
 * message type ordinal.
 *
 * @param buf The {@link ByteBuf} to serialize the header into.
 * @param messageType The {@link MessageType} of the message this header refers to.
 */
private static void writeHeader(final ByteBuf buf, final MessageType messageType) {
    buf.writeInt(VERSION);
    buf.writeInt(messageType.ordinal());
}
3.68
hbase_ProcedureWALFile_addToSize
/**
 * Used to update in-progress log sizes. The FileStatus will report 0 otherwise.
 *
 * @param size number of bytes to add to the tracked log size
 */
void addToSize(long size) {
  this.logSize += size;
}
3.68
hbase_UserQuotaState_setQuotas
/**
 * Add the quota information of the specified namespace. (This operation is part of the QuotaState
 * setup)
 *
 * @param namespace the namespace the quota applies to
 * @param quotas the quota settings for that namespace
 */
public void setQuotas(final String namespace, Quotas quotas) {
  namespaceLimiters = setLimiter(namespaceLimiters, namespace, quotas);
}
3.68
framework_VaadinService_createAndRegisterSession
/**
 * Creates and registers a new VaadinSession for this service. Assumes
 * proper locking has been taken care of by the caller.
 *
 * @param request
 *            The request which triggered session creation.
 * @return A new VaadinSession instance
 * @throws ServiceException
 *             if session initialization fails
 */
private VaadinSession createAndRegisterSession(VaadinRequest request)
        throws ServiceException {
    assert ((ReentrantLock) getSessionLock(request.getWrappedSession()))
            .isHeldByCurrentThread() : "Session has not been locked by this thread";

    VaadinSession session = createVaadinSession(request);
    VaadinSession.setCurrent(session);
    storeSession(session, request.getWrappedSession());

    // Initial WebBrowser data comes from the request
    session.getBrowser().updateRequestDetails(request);

    // Initial locale comes from the request
    Locale locale = request.getLocale();
    session.setLocale(locale);
    session.setConfiguration(getDeploymentConfiguration());
    session.setCommunicationManager(
            new LegacyCommunicationManager(session));

    ServletPortletHelper.initDefaultUIProvider(session, this);
    onVaadinSessionStarted(request, session);

    return session;
}
3.68
hbase_HMaster_isRegionOnline
/**
 * Blocks until the given region is online (open and its server alive) or this master is
 * stopped.
 *
 * @param ri the region to wait for
 * @return True if region is online and scannable else false if an error or shutdown (Otherwise
 *         we just block in here holding up all forward-progess).
 */
private boolean isRegionOnline(RegionInfo ri) {
  RetryCounter rc = null;
  while (!isStopped()) {
    RegionState rs = this.assignmentManager.getRegionStates().getRegionState(ri);
    if (rs != null && rs.isOpened()) {
      // The region is OPEN but only usable if its hosting server is still online.
      if (this.getServerManager().isServerOnline(rs.getServerName())) {
        return true;
      }
    }
    // Region is not OPEN.
    Optional<Procedure<MasterProcedureEnv>> optProc = this.procedureExecutor.getProcedures()
      .stream().filter(p -> p instanceof ServerCrashProcedure).findAny();
    // TODO: Add a page to refguide on how to do repair. Have this log message point to it.
    // Page will talk about loss of edits, how to schedule at least the meta WAL recovery, and
    // then how to assign including how to break region lock if one held.
    LOG.warn(
      "{} is NOT online; state={}; ServerCrashProcedures={}. Master startup cannot "
        + "progress, in holding-pattern until region onlined.",
      ri.getRegionNameAsString(), rs, optProc.isPresent());
    // Check once-a-minute.
    if (rc == null) {
      // Exponential backoff capped at one minute between checks.
      rc = new RetryCounterFactory(Integer.MAX_VALUE, 1000, 60_000).create();
    }
    Threads.sleep(rc.getBackoffTimeAndIncrementAttempts());
  }
  return false;
}
3.68