name | code_snippet | score |
---|---|---|
flink_Types_ENUM_rdh | /**
* Returns type information for Java enumerations. Null values are not supported.
*
* @param enumType
* enumeration class extending {@link java.lang.Enum}
*/
public static <E extends Enum<E>> TypeInformation<E> ENUM(Class<E> enumType) {
return new EnumTypeInfo<>(enumType);
} | 3.26 |
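A minimal usage sketch of the enum type information above; the `Side` enum and the class name are hypothetical and only `Types.ENUM` from `org.apache.flink.api.common.typeinfo.Types` is taken from the snippet.

```java
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;

public class EnumTypeInfoExample {
    // hypothetical enum, used only for illustration
    enum Side { BUY, SELL }

    public static void main(String[] args) {
        // type information for the enum; remember that null values are not supported
        TypeInformation<Side> sideType = Types.ENUM(Side.class);
        System.out.println(sideType);
    }
}
```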
flink_Types_LIST_rdh | /**
* Returns type information for a Java {@link java.util.List}. A list must not be null. Null
* values in elements are not supported.
*
* <p>By default, lists are untyped and treated as a generic type in Flink; therefore, it is
* useful to pass type information whenever a list is used.
*
* <p><strong>Note:</strong> Flink does not preserve the concrete {@link List} type. It converts
* a list into {@link ArrayList} when copying or deserializing.
*
* @param elementType
* type information for the list's elements
*/
public static <E> TypeInformation<List<E>> LIST(TypeInformation<E> elementType) {
return new ListTypeInfo<>(elementType);
} | 3.26 |
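A short, hedged usage sketch of the list type information; `Types.STRING` is the element type information from the same `Types` factory class, everything else is illustrative.

```java
import java.util.List;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;

public class ListTypeInfoExample {
    public static void main(String[] args) {
        // passing explicit element type information avoids falling back to a generic type
        TypeInformation<List<String>> tagsType = Types.LIST(Types.STRING);
        System.out.println(tagsType);
    }
}
```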
flink_GlobalProperties_combine_rdh | // --------------------------------------------------------------------------------------------
public static GlobalProperties combine(GlobalProperties gp1, GlobalProperties gp2) {
if (gp1.isFullyReplicated()) {
if (gp2.isFullyReplicated()) {
return new GlobalProperties();
} else {
return gp2;
}
} else if (gp2.isFullyReplicated()) {
return gp1;
} else if (gp1.ordering != null) {
return gp1;
} else if (gp2.ordering != null) {
return gp2;
} else if (gp1.partitioningFields != null) {
return gp1;
} else if (gp2.partitioningFields != null) {
return gp2;
} else if (gp1.uniqueFieldCombinations != null) {
return gp1;
} else if (gp2.uniqueFieldCombinations != null) {
return gp2;
} else if (gp1.getPartitioning().isPartitioned()) {
return gp1;
} else if (gp2.getPartitioning().isPartitioned()) {
return gp2;
} else {
return gp1;
}
} | 3.26 |
flink_GlobalProperties_setRangePartitioned_rdh | /**
 * Sets the parameters for range partitioning.
 *
 * @param ordering
 * Order of the partitioned fields
 * @param distribution
 * The data distribution for range partitioning. The user can supply a customized
 * data distribution; the distribution may also be null.
*/
public void setRangePartitioned(Ordering ordering, DataDistribution distribution) {
if (ordering == null) {
throw new NullPointerException();
}
this.partitioning = PartitioningProperty.RANGE_PARTITIONED;
this.ordering = ordering;
this.partitioningFields = ordering.getInvolvedIndexes();
this.distribution = distribution;
} | 3.26 |
flink_GlobalProperties_filterBySemanticProperties_rdh | /**
* Filters these GlobalProperties by the fields that are forwarded to the output as described by
* the SemanticProperties.
*
* @param props
* The semantic properties holding information about forwarded fields.
* @param input
* The index of the input.
* @return The filtered GlobalProperties
*/
public GlobalProperties filterBySemanticProperties(SemanticProperties props, int input) {
if (props == null) {
throw new NullPointerException("SemanticProperties may not be null.");
}
GlobalProperties gp = new GlobalProperties();
// filter partitioning
switch (this.partitioning) {
case RANGE_PARTITIONED :
// check if ordering is preserved
Ordering newOrdering = new Ordering();
for (int i = 0; i < this.ordering.getInvolvedIndexes().size(); i++) {
int v7 = this.ordering.getInvolvedIndexes().get(i);
FieldSet targetField = props.getForwardingTargetFields(input, v7);
if ((targetField == null) || (targetField.size() == 0)) {
// partitioning is destroyed
newOrdering = null;
break;
} else {
// use any field of target fields for now. We should use something like
// field equivalence sets in the future.
if (targetField.size() > 1) {
LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index.");
}
newOrdering.appendOrdering(targetField.toArray()[0], this.ordering.getType(i), this.ordering.getOrder(i));
}
}
if (newOrdering != null) {
gp.partitioning = PartitioningProperty.RANGE_PARTITIONED;
gp.ordering = newOrdering;
gp.partitioningFields = newOrdering.getInvolvedIndexes();
gp.distribution = this.distribution;
}
break;
case HASH_PARTITIONED :
case ANY_PARTITIONING :
case CUSTOM_PARTITIONING :
FieldList v9 = new FieldList();
for (int sourceField : this.partitioningFields) {
FieldSet targetField = props.getForwardingTargetFields(input, sourceField);
if ((targetField == null) || (targetField.size() == 0)) {
v9 = null;
break;
} else {
// use any field of target fields for now. We should use something like
// field equivalence sets in the future.
if (targetField.size() > 1) {
LOG.warn("Found that a field is forwarded to more than one target field in " +
"semantic forwarded field information. Will only use the field with the lowest index.");
}
v9 = v9.addField(targetField.toArray()[0]);
}
}
if (v9 != null) {
gp.partitioning = this.partitioning;
gp.partitioningFields = v9;
gp.customPartitioner = this.customPartitioner;
}
break;
case FORCED_REBALANCED :
case FULL_REPLICATION :
case RANDOM_PARTITIONED :
gp.partitioning = this.partitioning;
break;
default :
throw new RuntimeException("Unknown partitioning type.");
}
// filter unique field combinations
if (this.uniqueFieldCombinations != null) {
Set<FieldSet> newUniqueFieldCombinations = new HashSet<FieldSet>();
for (FieldSet fieldCombo : this.uniqueFieldCombinations) {
FieldSet v14 = new FieldSet();
for (Integer sourceField : fieldCombo) {
FieldSet targetField = props.getForwardingTargetFields(input, sourceField);
if ((targetField == null) || (targetField.size() == 0)) {
v14 = null;
break;
} else {
// use any field of target fields for now. We should use something like
// field equivalence sets in the future.
if (targetField.size() > 1) {
LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index.");
}
v14 = v14.addField(targetField.toArray()[0]);
}
}
if (v14 != null) {
newUniqueFieldCombinations.add(v14);
}
}
if (!newUniqueFieldCombinations.isEmpty()) {
gp.uniqueFieldCombinations = newUniqueFieldCombinations;
}
}
return gp;
} | 3.26 |
flink_GlobalProperties_isPartitionedOnFields_rdh | // --------------------------------------------------------------------------------------------
public boolean isPartitionedOnFields(FieldSet fields) {
if (this.partitioning.isPartitionedOnKey() && fields.isValidSubset(this.partitioningFields)) {
return true;
} else if (this.uniqueFieldCombinations != null) {
for (FieldSet set : this.uniqueFieldCombinations) {
if (fields.isValidSubset(set)) {
return true;
}
}
return false;
} else {
return false;
}
} | 3.26 |
flink_GlobalProperties_setHashPartitioned_rdh | // --------------------------------------------------------------------------------------------
/**
* Sets this global properties to represent a hash partitioning.
*
* @param partitionedFields
* The key fields on which the data is hash partitioned.
*/
public void setHashPartitioned(FieldList partitionedFields) {
if (partitionedFields == null) {
throw new NullPointerException();
}
this.partitioning = PartitioningProperty.HASH_PARTITIONED;
this.partitioningFields = partitionedFields;
this.ordering = null;
} | 3.26 |
flink_GlobalProperties_isTrivial_rdh | /**
 * Checks if the properties in this object are trivial, i.e. contain only standard values.
*/
public boolean isTrivial() {
return partitioning == PartitioningProperty.RANDOM_PARTITIONED;
} | 3.26 |
flink_GlobalProperties_reset_rdh | /**
* This method resets the properties to a state where no properties are given.
*/
public void reset() {
this.partitioning = PartitioningProperty.RANDOM_PARTITIONED;
this.ordering = null;
this.partitioningFields = null;
} | 3.26 |
flink_GlobalProperties_hashCode_rdh | // ------------------------------------------------------------------------
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = (prime * result) + (partitioning == null ? 0 : partitioning.ordinal());
result = (prime * result) + (partitioningFields == null ? 0 : partitioningFields.hashCode());
result = (prime * result) + (ordering == null ? 0 : ordering.hashCode());
return result;
} | 3.26 |
flink_HighAvailabilityMode_fromConfig_rdh | /**
* Return the configured {@link HighAvailabilityMode}.
*
* @param config
* The config to parse
* @return Configured recovery mode or {@link HighAvailabilityMode#NONE} if not configured.
*/
public static HighAvailabilityMode fromConfig(Configuration config) {
String haMode = config.getValue(HighAvailabilityOptions.HA_MODE);
if (haMode == null) {
return HighAvailabilityMode.NONE;
} else if (haMode.equalsIgnoreCase(ConfigConstants.DEFAULT_RECOVERY_MODE)) {
// Map old default to new default
return HighAvailabilityMode.NONE;
} else {
try {
return HighAvailabilityMode.valueOf(haMode.toUpperCase());
} catch (IllegalArgumentException e) {
return FACTORY_CLASS;
}
}
} | 3.26 |
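A hedged sketch of how the parsing above behaves; the package of `HighAvailabilityMode` and the exact `Configuration` setter used here are assumptions based on typical Flink runtime code, not taken from the snippet.

```java
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.HighAvailabilityOptions;
import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;

public class HaModeExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // nothing configured -> falls back to NONE
        System.out.println(HighAvailabilityMode.fromConfig(conf));

        // the mode string is parsed case-insensitively into the enum
        conf.setString(HighAvailabilityOptions.HA_MODE.key(), "zookeeper");
        System.out.println(HighAvailabilityMode.fromConfig(conf));
    }
}
```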
flink_HighAvailabilityMode_isHighAvailabilityModeActivated_rdh | /**
* Returns true if the defined recovery mode supports high availability.
*
* @param configuration
* Configuration which contains the recovery mode
* @return true if high availability is supported by the recovery mode, otherwise false
*/
public static boolean isHighAvailabilityModeActivated(Configuration configuration) {
HighAvailabilityMode mode = fromConfig(configuration);
return mode.haActive;
} | 3.26 |
flink_TextElement_text_rdh | /**
* Creates a simple block of text.
*
* @param text
* a simple block of text
* @return block of text
*/
public static TextElement text(String text) {
return new TextElement(text, Collections.emptyList());
} | 3.26 |
flink_TextElement_wrap_rdh | /**
* Wraps a list of {@link InlineElement}s into a single {@link TextElement}.
*/
public static InlineElement wrap(InlineElement... elements) {
return text(Strings.repeat("%s", elements.length), elements);
} | 3.26 |
flink_ValueTypeInfo_hashCode_rdh | // --------------------------------------------------------------------------------------------
@Override
public int hashCode() {
return this.type.hashCode();
} | 3.26 |
flink_ValueTypeInfo_createCopyableValueSerializer_rdh | // utility method to summon the necessary bound
private static <X extends CopyableValue<X>> CopyableValueSerializer<X> createCopyableValueSerializer(Class<X> clazz) {
return new CopyableValueSerializer<X>(clazz);
} | 3.26 |
flink_ValueTypeInfo_getValueTypeInfo_rdh | // --------------------------------------------------------------------------------------------
@PublicEvolving
static <X extends Value> TypeInformation<X> getValueTypeInfo(Class<X> typeClass) {
if (Value.class.isAssignableFrom(typeClass) && (!typeClass.equals(Value.class))) {
return new ValueTypeInfo<X>(typeClass);
} else {
throw new InvalidTypesException("The given class is no subclass of " + Value.class.getName());
}
} | 3.26 |
flink_ExtractionUtils_primitiveToWrapper_rdh | /**
* Returns the boxed type of a primitive type.
*/
static Type primitiveToWrapper(Type type) {
if (type instanceof Class) {
return primitiveToWrapper(((Class<?>) (type)));
}
return type;
} | 3.26 |
flink_ExtractionUtils_getStructuredField_rdh | /**
* Returns the field of a structured type. The logic is as broad as possible to support both
* Java and Scala in different flavors.
*/
public static Field getStructuredField(Class<?> clazz, String fieldName) {
final String normalizedFieldName = fieldName.toUpperCase();
final List<Field> fields = m3(clazz);
for (Field field : fields) {
if (field.getName().toUpperCase().equals(normalizedFieldName)) {
return field;
}
}
throw extractionError("Could not find a field named '%s' in class '%s' for structured type.", fieldName, clazz.getName());
} | 3.26 |
flink_ExtractionUtils_isInvokable_rdh | /**
* Checks whether a method/constructor can be called with the given argument classes. This
* includes type widening and vararg. {@code null} is a wildcard.
*
* <p>E.g., {@code (int.class, int.class)} matches {@code f(Object...), f(int, int), f(Integer,
* Object)} and so forth.
*/
public static boolean isInvokable(Executable executable, Class<?>... classes) {
final int m = executable.getModifiers();
if (!Modifier.isPublic(m)) {
return false;
}
final int paramCount = executable.getParameterCount();
final int v2 = classes.length;
// check for enough classes for each parameter
if (((!executable.isVarArgs()) && (v2 != paramCount)) || (executable.isVarArgs() && (v2 < (paramCount - 1)))) {
return false;
}
int currentClass = 0;
for (int currentParam = 0; currentParam < paramCount; currentParam++) {
final Class<?> param = executable.getParameterTypes()[currentParam];
// last parameter is a vararg that needs to consume remaining classes
if ((currentParam == (paramCount - 1)) && executable.isVarArgs()) {
final Class<?> v6 = executable.getParameterTypes()[currentParam].getComponentType();
// we have more than 1 classes left so the vararg needs to consume them all
if ((v2 - currentClass) > 1) {
while ((currentClass < v2) && ExtractionUtils.isAssignable(classes[currentClass], v6, true)) {
currentClass++;
}
} else if ((currentClass < v2) && (parameterMatches(classes[currentClass], param)
|| parameterMatches(classes[currentClass], v6))) {
currentClass++;
}
} else if (parameterMatches(classes[currentClass], param)) {
currentClass++;
}
}
// check if all classes have been consumed
return currentClass == v2;
} | 3.26 |
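The matching rules described in the Javadoc can be made concrete with a hedged sketch; the `Udf` class is hypothetical, and the import assumes `ExtractionUtils` lives in the flink-table extraction package.

```java
import java.lang.reflect.Method;
import org.apache.flink.table.types.extraction.ExtractionUtils;

public class IsInvokableExample {
    // hypothetical function class used only for illustration
    public static class Udf {
        public void eval(Integer a, Object b) {}
        public void evalVarArgs(int first, String... rest) {}
    }

    public static void main(String[] args) throws Exception {
        Method eval = Udf.class.getMethod("eval", Integer.class, Object.class);
        // int boxes to Integer and String is an Object: both parameters match
        System.out.println(ExtractionUtils.isInvokable(eval, int.class, String.class)); // true

        Method varArgs = Udf.class.getMethod("evalVarArgs", int.class, String[].class);
        // the trailing vararg consumes the remaining String arguments
        System.out.println(ExtractionUtils.isInvokable(varArgs, int.class, String.class, String.class)); // true
    }
}
```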
flink_ExtractionUtils_validateStructuredClass_rdh | /**
* Validates the characteristics of a class for a {@link StructuredType} such as accessibility.
*/
public static void validateStructuredClass(Class<?> clazz) {
final int m = clazz.getModifiers();
if (Modifier.isAbstract(m)) {
throw extractionError("Class '%s' must not be abstract.", clazz.getName());
}
if (!Modifier.isPublic(m)) {
throw extractionError("Class '%s' is not public.", clazz.getName());
}
if ((clazz.getEnclosingClass() != null) && ((clazz.getDeclaringClass() == null) || (!Modifier.isStatic(m)))) {
throw extractionError("Class '%s' is a not a static, globally accessible class.", clazz.getName());
}
} | 3.26 |
flink_ExtractionUtils_extractConstructorParameterNames_rdh | /**
* Extracts ordered parameter names from a constructor that takes all of the given fields with
* matching (possibly primitive and lenient) type and name.
*/
@Nullable
private static List<String> extractConstructorParameterNames(Constructor<?> constructor, List<Field> fields) {
final Type[] parameterTypes = constructor.getGenericParameterTypes();
List<String> parameterNames = extractExecutableNames(constructor);
if (parameterNames == null) {
return null;
}
final Map<String, Field> fieldMap = fields.stream().collect(Collectors.toMap(f -> normalizeAccessorName(f.getName()), Function.identity()));
// check that all fields are represented in the parameters of the constructor
final List<String> fieldNames = new ArrayList<>();
for (int i = 0; i < parameterNames.size(); i++) {
final String parameterName = normalizeAccessorName(parameterNames.get(i));
final Field field = fieldMap.get(parameterName);
if (field == null) {
return null;
}
final Type fieldType = field.getGenericType();
final Type parameterType = parameterTypes[i];
// we are tolerant here because frameworks such as Avro accept a boxed type even though
// the field is primitive
if (!primitiveToWrapper(parameterType).equals(primitiveToWrapper(fieldType))) {
return null;
}
fieldNames.add(field.getName());
}
return fieldNames;
} | 3.26 |
flink_ExtractionUtils_collectMethods_rdh | // --------------------------------------------------------------------------------------------
// Methods shared across packages
// --------------------------------------------------------------------------------------------
/**
* Collects methods of the given name.
*/
public static List<Method> collectMethods(Class<?> function,
String methodName) {
return // for deterministic order
Arrays.stream(function.getMethods()).filter(method -> method.getName().equals(methodName)).sorted(Comparator.comparing(Method::toString)).collect(Collectors.toList());
} | 3.26 |
flink_ExtractionUtils_createMethodSignatureString_rdh | /**
* Creates a method signature string like {@code int eval(Integer, String)}.
*/
public static String createMethodSignatureString(String methodName, Class<?>[] parameters, @Nullable
Class<?> returnType) {
final StringBuilder builder = new StringBuilder();
if (returnType != null) {
builder.append(returnType.getCanonicalName()).append(" ");
}
builder.append(methodName).append(Stream.of(parameters).map(parameter -> {
// in case we don't know the parameter at this location
// (i.e. for accumulators)
if (parameter == null) {
return "_";
} else {
return parameter.getCanonicalName();
}
}).collect(Collectors.joining(", ", "(", ")")));
return builder.toString();
} | 3.26 |
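A hedged usage sketch of the signature string produced above; the package path of `ExtractionUtils` is an assumption, the call itself matches the public static method shown in the snippet.

```java
import org.apache.flink.table.types.extraction.ExtractionUtils;

public class SignatureStringExample {
    public static void main(String[] args) {
        // prints something like: int eval(java.lang.Integer, java.lang.String)
        String signature = ExtractionUtils.createMethodSignatureString(
                "eval", new Class<?>[] {Integer.class, String.class}, int.class);
        System.out.println(signature);

        // a null parameter is rendered as '_' (e.g. for unknown accumulators)
        String withUnknown = ExtractionUtils.createMethodSignatureString(
                "eval", new Class<?>[] {null, String.class}, null);
        System.out.println(withUnknown); // eval(_, java.lang.String)
    }
}
```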
flink_ExtractionUtils_createRawType_rdh | /**
* Creates a raw data type.
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
static DataType createRawType(DataTypeFactory typeFactory, @Nullable
Class<? extends TypeSerializer<?>> rawSerializer, @Nullable
Class<?> conversionClass) {
if (rawSerializer != null) {
return DataTypes.RAW(((Class) (createConversionClass(conversionClass))), instantiateRawSerializer(rawSerializer));
}
return typeFactory.createRawDataType(createConversionClass(conversionClass));
} | 3.26 |
flink_ExtractionUtils_extractAssigningConstructor_rdh | /**
* Checks whether the given constructor takes all of the given fields with matching (possibly
* primitive) type and name. An assigning constructor can define the order of fields.
*/
@Nullable
public static AssigningConstructor extractAssigningConstructor(Class<?> clazz, List<Field> fields) {
AssigningConstructor foundConstructor = null;
for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
final boolean qualifyingConstructor = Modifier.isPublic(constructor.getModifiers()) && (constructor.getParameterCount() == fields.size());
if (!qualifyingConstructor) {
continue;
}
final List<String> parameterNames = extractConstructorParameterNames(constructor, fields);
if (parameterNames != null) {
if (foundConstructor != null) {
throw extractionError("Multiple constructors found that assign all fields for class '%s'.", clazz.getName());
}
foundConstructor = new AssigningConstructor(constructor, parameterNames);
}
}
return foundConstructor;
} | 3.26 |
flink_ExtractionUtils_getStructuredFieldGetter_rdh | /**
* Checks for a field getter of a structured type. The logic is as broad as possible to support
* both Java and Scala in different flavors.
*/
public static Optional<Method> getStructuredFieldGetter(Class<?> clazz, Field field) {
final String normalizedFieldName = normalizeAccessorName(field.getName());
final List<Method> methods = collectStructuredMethods(clazz);
for (Method method : methods) {
// check name:
// get<Name>()
// is<Name>()
// <Name>() for Scala
final String normalizedMethodName = normalizeAccessorName(method.getName());
final boolean hasName = (normalizedMethodName.equals("GET" + normalizedFieldName) || normalizedMethodName.equals("IS" + normalizedFieldName)) || normalizedMethodName.equals(normalizedFieldName);
if (!hasName) {
continue;
}
// check return type:
// equal to field type
final Type returnType = method.getGenericReturnType();
final boolean hasReturnType = returnType.equals(field.getGenericType());
if (!hasReturnType) {
continue;
}
// check parameters:
// no parameters
final boolean hasNoParameters = method.getParameterCount() == 0;
if (!hasNoParameters) {
continue;
}
// matching getter found
return Optional.of(method);
}
// no getter found
return Optional.empty();
} | 3.26 |
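To make the accepted accessor patterns concrete, here is a small hypothetical POJO (not from the snippet) whose getters would be found by the lookup above.

```java
// Hypothetical structured type: both accessors below satisfy the getter lookup,
// because they match the get<Name>()/is<Name>() naming, return exactly the field
// type, and take no parameters.
public class User {
    private String name;
    private boolean active;

    public String getName() {
        return name;
    }

    public boolean isActive() {
        return active;
    }
}
```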
flink_ExtractionUtils_collectAnnotationsOfClass_rdh | /**
* Collects all annotations of the given type defined in the current class or superclasses.
* Duplicates are ignored.
*/
static <T extends Annotation> Set<T> collectAnnotationsOfClass(Class<T> annotation, Class<?> annotatedClass) {
final List<Class<?>> classHierarchy = new ArrayList<>();
Class<?> v52 = annotatedClass;
while (v52 != null) {
classHierarchy.add(v52);
v52 = v52.getSuperclass();
}
// convert to top down
Collections.reverse(classHierarchy);
return classHierarchy.stream().flatMap(c -> Stream.of(c.getAnnotationsByType(annotation))).collect(Collectors.toCollection(LinkedHashSet::new));
} | 3.26 |
flink_ExtractionUtils_collectStructuredMethods_rdh | /**
* Collects all methods that qualify as methods of a {@link StructuredType}.
*/
static List<Method> collectStructuredMethods(Class<?> clazz) {
final List<Method> methods = new ArrayList<>();
while (clazz != Object.class) {
final Method[] declaredMethods = clazz.getDeclaredMethods();
Stream.of(declaredMethods).filter(field -> { final int m = field.getModifiers();
return (Modifier.isPublic(m) && (!Modifier.isNative(m))) && (!Modifier.isAbstract(m));
}).forEach(methods::add);
clazz = clazz.getSuperclass();
}
return methods;
} | 3.26 |
flink_ExtractionUtils_resolveVariable_rdh | /**
* Resolves a {@link TypeVariable} using the given type hierarchy if possible.
*/
static Type resolveVariable(List<Type> typeHierarchy, TypeVariable<?> variable) {
// iterate through hierarchy from top to bottom until type variable gets a non-variable
// assigned
for (int i = typeHierarchy.size() - 1; i >= 0; i--) {
final Type currentType = typeHierarchy.get(i);
if (currentType instanceof ParameterizedType) {
final Type resolvedType = m2(variable, ((ParameterizedType) (currentType)));
if (resolvedType instanceof TypeVariable) {
// follow type variables transitively
variable = ((TypeVariable<?>) (resolvedType));
} else if (resolvedType != null) {
return resolvedType;
}
}
}
// unresolved variable
return variable;
} | 3.26 |
flink_ExtractionUtils_isAssignable_rdh | // --------------------------------------------------------------------------------------------
// Class Assignment and Boxing
//
// copied from o.a.commons.lang3.ClassUtils (commons-lang3:3.3.2)
// --------------------------------------------------------------------------------------------
/**
* Checks if one {@code Class} can be assigned to a variable of another {@code Class}.
*
* <p>Unlike the {@link Class#isAssignableFrom(java.lang.Class)} method, this method takes into
* account widenings of primitive classes and {@code null}s.
*
* <p>Primitive widenings allow an int to be assigned to a long, float or double. This method
* returns the correct result for these cases.
*
* <p>{@code Null} may be assigned to any reference type. This method will return {@code true}
* if {@code null} is passed in and the toClass is non-primitive.
*
* <p>Specifically, this method tests whether the type represented by the specified {@code Class} parameter can be converted to the type represented by this {@code Class} object via an
* identity conversion widening primitive or widening reference conversion. See <em><a
* href="http://docs.oracle.com/javase/specs/">The Java Language Specification</a></em>,
* sections 5.1.1, 5.1.2 and 5.1.4 for details.
*
* @param cls
* the Class to check, may be null
* @param toClass
* the Class to try to assign into, returns false if null
* @param autoboxing
* whether to use implicit autoboxing/unboxing between primitives and wrappers
* @return {@code true} if assignment possible
*/
public static boolean isAssignable(Class<?> cls, final Class<?> toClass, final boolean autoboxing) {
if (toClass == null) {
return false;
}
// have to check for null, as isAssignableFrom doesn't
if (cls == null) {
return !toClass.isPrimitive();
}
// autoboxing:
if (autoboxing) {
if (cls.isPrimitive() && (!toClass.isPrimitive())) {
cls = primitiveToWrapper(cls);
if (cls == null) {
return false;
}
}
if (toClass.isPrimitive() && (!cls.isPrimitive())) {
cls = wrapperToPrimitive(cls);
if (cls == null) {
return false;
}
}
}
if (cls.equals(toClass)) {
return true;
}
if (cls.isPrimitive()) {
if (!toClass.isPrimitive()) {
return false;
}
if (Integer.TYPE.equals(cls)) {
return (Long.TYPE.equals(toClass) || Float.TYPE.equals(toClass)) || Double.TYPE.equals(toClass);
}
if (Long.TYPE.equals(cls)) {
return Float.TYPE.equals(toClass) || Double.TYPE.equals(toClass);
}
if (Boolean.TYPE.equals(cls)) {
return false;
}
if (Double.TYPE.equals(cls)) {
return false;
}
if (Float.TYPE.equals(cls)) {
return Double.TYPE.equals(toClass);
}
if (Character.TYPE.equals(cls)) {
return ((Integer.TYPE.equals(toClass) || Long.TYPE.equals(toClass)) || Float.TYPE.equals(toClass)) || Double.TYPE.equals(toClass);
}
if (Short.TYPE.equals(cls)) {
return ((Integer.TYPE.equals(toClass) || Long.TYPE.equals(toClass)) || Float.TYPE.equals(toClass)) || Double.TYPE.equals(toClass);
}
if (Byte.TYPE.equals(cls)) {
return (((Short.TYPE.equals(toClass) || Integer.TYPE.equals(toClass)) || Long.TYPE.equals(toClass)) || Float.TYPE.equals(toClass)) || Double.TYPE.equals(toClass);
}
// should never get here
return false;
}
return toClass.isAssignableFrom(cls);
} | 3.26 |
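A hedged demonstration of the widening, autoboxing, and null rules described above; the package path of `ExtractionUtils` is an assumption, the method is the public static one shown in the snippet.

```java
import org.apache.flink.table.types.extraction.ExtractionUtils;

public class IsAssignableExample {
    public static void main(String[] args) {
        // primitive widening: int -> long is allowed, long -> int is not
        System.out.println(ExtractionUtils.isAssignable(int.class, long.class, true));  // true
        System.out.println(ExtractionUtils.isAssignable(long.class, int.class, true));  // false

        // autoboxing: Integer unboxes to int and may widen further, e.g. to double
        System.out.println(ExtractionUtils.isAssignable(Integer.class, double.class, true)); // true

        // null is assignable to any reference type but never to a primitive
        System.out.println(ExtractionUtils.isAssignable(null, String.class, true)); // true
        System.out.println(ExtractionUtils.isAssignable(null, int.class, true));    // false
    }
}
```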
flink_ExtractionUtils_isStructuredFieldMutable_rdh | /**
* Checks if a field is mutable or immutable. Returns {@code true} if the field is properly
* mutable. Returns {@code false} if it is properly immutable.
*/
static boolean isStructuredFieldMutable(Class<?> clazz, Field field) {
final int m = field.getModifiers();
// field is immutable
if (Modifier.isFinal(m)) {
return false;
}
// field is directly mutable
if (Modifier.isPublic(m)) {
return true;
}
// field has setters by which it is mutable
if (getStructuredFieldSetter(clazz, field).isPresent()) {
return true;
}
throw extractionError("Field '%s' of class '%s' is mutable but is neither publicly accessible nor does it have " + "a corresponding setter method.", field.getName(), clazz.getName());
} | 3.26 |
flink_ExtractionUtils_m3_rdh | /**
* Returns the fields of a class for a {@link StructuredType}.
*/
static List<Field> m3(Class<?> clazz) {
final List<Field> fields = new ArrayList<>();
while (clazz != Object.class) {
final Field[] declaredFields = clazz.getDeclaredFields();
Stream.of(declaredFields).filter(field -> {
final int m = field.getModifiers();
return (!Modifier.isStatic(m)) && (!Modifier.isTransient(m));
}).forEach(fields::add);
clazz = clazz.getSuperclass();
}
return fields;
} | 3.26 |
flink_ExtractionUtils_validateStructuredFieldReadability_rdh | /**
* Validates if a field is properly readable either directly or through a getter.
*/
static void validateStructuredFieldReadability(Class<?> clazz, Field field) {
// field is accessible
if (isStructuredFieldDirectlyReadable(field)) {
return;
}
// field needs a getter
if (!getStructuredFieldGetter(clazz, field).isPresent()) {
throw extractionError("Field '%s' of class '%s' is neither publicly accessible nor does it have " + "a corresponding getter method.", field.getName(), clazz.getName());
}
} | 3.26 |
flink_ExtractionUtils_collectAnnotationsOfMethod_rdh | /**
* Collects all annotations of the given type defined in the given method. Duplicates are
* ignored.
*/
static <T extends Annotation> Set<T> collectAnnotationsOfMethod(Class<T> annotation, Method annotatedMethod) {
return new LinkedHashSet<>(Arrays.asList(annotatedMethod.getAnnotationsByType(annotation)));
} | 3.26 |
flink_ExtractionUtils_isStructuredFieldDirectlyReadable_rdh | /**
* Checks whether a field is directly readable without a getter.
*/
public static boolean isStructuredFieldDirectlyReadable(Field field) {
final int m = field.getModifiers();
// field is directly readable
return Modifier.isPublic(m);
} | 3.26 |
flink_ExtractionUtils_getStructuredFieldSetter_rdh | /**
 * Checks for a field setter of a structured type. The logic is as broad as possible to support
* both Java and Scala in different flavors.
*/
public static Optional<Method> getStructuredFieldSetter(Class<?> clazz, Field field) {
final String normalizedFieldName = normalizeAccessorName(field.getName());
final List<Method> methods = collectStructuredMethods(clazz);
for (Method method : methods) {
// check name:
// set<Name>(type)
// <Name>(type)
// <Name>_$eq(type) for Scala
final String normalizedMethodName = normalizeAccessorName(method.getName());
final boolean hasName = (normalizedMethodName.equals("SET" + normalizedFieldName) || normalizedMethodName.equals(normalizedFieldName)) || normalizedMethodName.equals(normalizedFieldName + "$EQ");
if (!hasName) {
continue;
}
// check return type:
// void or the declaring class
final Class<?> returnType = method.getReturnType();
final boolean hasReturnType = (returnType == Void.TYPE) || (returnType == clazz);
if (!hasReturnType) {
continue;
}
// check parameters:
// one parameter that has the same (or primitive) type of the field
final boolean hasParameter = (method.getParameterCount() == 1) && (method.getGenericParameterTypes()[0].equals(field.getGenericType()) || primitiveToWrapper(method.getGenericParameterTypes()[0]).equals(field.getGenericType()));
if (!hasParameter) {
continue;
}
// matching setter found
return Optional.of(method);
}
// no setter found
return Optional.empty();
} | 3.26 |
flink_ExtractionUtils_collectTypeHierarchy_rdh | /**
* Collects the partially ordered type hierarchy (i.e. all involved super classes and super
* interfaces) of the given type.
*/
static List<Type> collectTypeHierarchy(Type type) {
Type currentType = type;
Class<?> currentClass = m1(type);
final List<Type> typeHierarchy = new ArrayList<>();
while (currentClass != null) {
// collect type
typeHierarchy.add(currentType);
// collect super interfaces
for (Type genericInterface : currentClass.getGenericInterfaces()) {
final Class<?> interfaceClass = m1(genericInterface);
if (interfaceClass != null) {
typeHierarchy.addAll(collectTypeHierarchy(genericInterface));
}
}
currentType = currentClass.getGenericSuperclass();
currentClass = m1(currentType);
}
return typeHierarchy;
} | 3.26 |
flink_ExtractionUtils_extractMethodParameterNames_rdh | /**
* Extracts the parameter names of a method if possible.
*/
@Nullable
static List<String> extractMethodParameterNames(Method method) {
return extractExecutableNames(method);
} | 3.26 |
flink_ExtractionUtils_classForName_rdh | /**
* Similar to {@link Class#forName(String, boolean, ClassLoader)} but resolves primitive names
* as well.
*/
public static Class<?> classForName(String name, boolean initialize, ClassLoader classLoader) throws ClassNotFoundException {
if (primitiveNameMap.containsKey(name)) {
return primitiveNameMap.get(name);
}
return Class.forName(name, initialize, classLoader);
} | 3.26 |
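A hedged sketch contrasting the primitive-name resolution above with plain `Class.forName`; only the `classForName` signature is taken from the snippet, the package path is assumed.

```java
import org.apache.flink.table.types.extraction.ExtractionUtils;

public class ClassForNameExample {
    public static void main(String[] args) throws ClassNotFoundException {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();

        // "int" would fail with Class.forName, but resolves to the primitive class here
        Class<?> primitive = ExtractionUtils.classForName("int", true, cl);
        System.out.println(primitive); // int

        // non-primitive names are delegated to Class.forName
        Class<?> regular = ExtractionUtils.classForName("java.lang.String", true, cl);
        System.out.println(regular); // class java.lang.String
    }
}
```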
flink_ExtractionUtils_m1_rdh | /**
* Converts a {@link Type} to {@link Class} if possible, {@code null} otherwise.
*/
@Nullable
static Class<?> m1(Type type) {
if (type instanceof Class) {
return ((Class<?>) (type));
} else if (type instanceof ParameterizedType) {
// this is always a class
return ((Class<?>) (((ParameterizedType) (type)).getRawType()));
}
// unsupported: generic arrays, type variables, wildcard types
return null;
} | 3.26 |
flink_ExtractionUtils_isStructuredFieldDirectlyWritable_rdh | /**
* Checks whether a field is directly writable without a setter or constructor.
*/
public static boolean isStructuredFieldDirectlyWritable(Field field) {
final int m = field.getModifiers();
// field is immutable
if (Modifier.isFinal(m)) {
return false;
}
// field is directly writable
return Modifier.isPublic(m);
} | 3.26 |
flink_ExtractionUtils_extractSimpleGeneric_rdh | /**
* A minimal version to extract a generic parameter from a given class.
*
* <p>This method should only be used for very specific use cases, in most cases {@link DataTypeExtractor#extractFromGeneric(DataTypeFactory, Class, int, Type)} should be more
* appropriate.
*/
public static Optional<Class<?>> extractSimpleGeneric(Class<?> baseClass, Class<?> clazz, int pos) {
try {
if (clazz.getSuperclass() != baseClass) {
return Optional.empty();
}
final Type t = ((ParameterizedType) (clazz.getGenericSuperclass())).getActualTypeArguments()[pos];
return Optional.ofNullable(m1(t));
} catch (Exception unused) {
return Optional.empty();
}
} | 3.26 |
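A hedged sketch of the generic-parameter extraction above; the `Converter`/`StringConverter` classes are hypothetical, and the package path of `ExtractionUtils` is an assumption.

```java
import java.util.Optional;
import org.apache.flink.table.types.extraction.ExtractionUtils;

public class SimpleGenericExample {
    // hypothetical generic base class and a subclass that fixes the parameter to String
    public static class Converter<T> {}
    public static class StringConverter extends Converter<String> {}

    public static void main(String[] args) {
        Optional<Class<?>> generic =
                ExtractionUtils.extractSimpleGeneric(Converter.class, StringConverter.class, 0);
        System.out.println(generic); // Optional[class java.lang.String]
    }
}
```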
flink_ExtractionUtils_validateStructuredSelfReference_rdh | /**
* Validates if a given type is not already contained in the type hierarchy of a structured
* type.
*
* <p>Otherwise this would lead to infinite data type extraction cycles.
*/
static void validateStructuredSelfReference(Type t, List<Type> typeHierarchy) {
final Class<?> clazz = m1(t);
if ((((clazz != null) && (!clazz.isInterface())) && (clazz != Object.class)) && typeHierarchy.contains(t)) {
throw extractionError("Cyclic reference detected for class '%s'. Attributes of structured types must not " + "(transitively) reference the structured type itself.", clazz.getName());
}
} | 3.26 |
flink_ExtractionUtils_m0_rdh | /**
* Checks for an invokable constructor matching the given arguments.
*
* @see #isInvokable(Executable, Class[])
*/
public static boolean m0(Class<?> clazz, Class<?>... classes) {
for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
if (isInvokable(constructor, classes)) {
return true;
}
}
return false;
} | 3.26 |
flink_ExtractionUtils_extractionError_rdh | /**
* Helper method for creating consistent exceptions during extraction.
*/
static ValidationException extractionError(Throwable cause, String message, Object... args) {
return new ValidationException(String.format(message, args), cause);
} | 3.26 |
flink_ListSerializer_isImmutableType_rdh | // ------------------------------------------------------------------------
// Type Serializer implementation
// ------------------------------------------------------------------------
@Override
public boolean isImmutableType() {
return false;
} | 3.26 |
flink_ListSerializer_getElementSerializer_rdh | // ------------------------------------------------------------------------
// ListSerializer specific properties
// ------------------------------------------------------------------------
/**
* Gets the serializer for the elements of the list.
*
* @return The serializer for the elements of the list
 */
public TypeSerializer<T> getElementSerializer() {
return elementSerializer;
} | 3.26 |
flink_ListSerializer_snapshotConfiguration_rdh | // --------------------------------------------------------------------------------------------
// Serializer configuration snapshot & compatibility
// --------------------------------------------------------------------------------------------
@Override
public TypeSerializerSnapshot<List<T>> snapshotConfiguration() {
return new ListSerializerSnapshot<>(this);
} | 3.26 |
flink_ListSerializer_equals_rdh | // --------------------------------------------------------------------
@Override
public boolean equals(Object obj) {
return (obj == this) || (((obj != null) && (obj.getClass() == getClass())) && elementSerializer.equals(((ListSerializer<?>) (obj)).elementSerializer));
} | 3.26 |
flink_BaseMappingExtractor_verifyMappingForMethod_rdh | /**
 * Checks that the given method can actually be called with the signatures and results that the hints declare.
*/
private void verifyMappingForMethod(Method method, Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappingsPerMethod, MethodVerification verification) {
collectedMappingsPerMethod.forEach((signature, result) -> verification.verify(method, signature.toClass(), result.toClass()));
} | 3.26 |
flink_BaseMappingExtractor_extractResultMappings_rdh | /**
* Extracts mappings from signature to result (either accumulator or output) for the entire
* function. Verifies if the extracted inference matches with the implementation.
*
* <p>For example, from {@code (INT, BOOLEAN, ANY) -> INT}. It does this by going through all
* implementation methods and collecting all "per-method" mappings. The function mapping is the
* union of all "per-method" mappings.
*/
protected Map<FunctionSignatureTemplate, FunctionResultTemplate> extractResultMappings(ResultExtraction resultExtraction, Function<FunctionTemplate, FunctionResultTemplate> accessor,
MethodVerification verification) {
final Set<FunctionTemplate> global = extractGlobalFunctionTemplates();
final Set<FunctionResultTemplate> globalResultOnly = findResultOnlyTemplates(global, accessor);
// for each method find a signature that maps to results
final Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappings = new LinkedHashMap<>();
final List<Method> methods = collectMethods(methodName);
if (methods.size() == 0) {
throw extractionError("Could not find a publicly accessible method named '%s'.", methodName);
}
for (Method method : methods) {
try {
final Method correctMethod = correctVarArgMethod(method);
final Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappingsPerMethod = collectMethodMappings(correctMethod, global, globalResultOnly, resultExtraction, accessor);
// check if the method can be called
verifyMappingForMethod(correctMethod, collectedMappingsPerMethod, verification);
// check if method strategies conflict with function strategies
collectedMappingsPerMethod.forEach((signature, result) -> putMapping(collectedMappings, signature, result));
} catch (Throwable t) {
throw extractionError(t, "Unable to extract a type inference from method:\n%s", method.toString());
}
}
return collectedMappings;
} | 3.26 |
flink_BaseMappingExtractor_correctVarArgMethod_rdh | /**
 * Special case for Scala which generates two methods when using var-args (a {@code Seq<String>}
 * and {@code String...}). This method searches for the Java-like variant.
*/
static Method correctVarArgMethod(Method method) {
final int paramCount = method.getParameterCount();
final Class<?>[] paramClasses = method.getParameterTypes();
if ((paramCount > 0) && paramClasses[paramCount - 1].getName().equals("scala.collection.Seq")) {
final Type[] paramTypes = method.getGenericParameterTypes();
final ParameterizedType seqType = ((ParameterizedType) (paramTypes[paramCount - 1]));
final Type varArgType = seqType.getActualTypeArguments()[0];
return ExtractionUtils.collectMethods(method.getDeclaringClass(), method.getName()).stream().filter(Method::isVarArgs).filter(candidate -> candidate.getParameterCount() == paramCount).filter(candidate -> {
final Type[] candidateParamTypes = candidate.getGenericParameterTypes();
for (int i = 0; i < (paramCount - 1); i++) {
if (candidateParamTypes[i] != paramTypes[i]) {
return false;
}
}
final Class<?> candidateVarArgType = candidate.getParameterTypes()[paramCount - 1];
return candidateVarArgType.isArray() && // check for Object is needed in case of Scala primitives
// (e.g. Int)
((varArgType == Object.class) || (candidateVarArgType.getComponentType() == varArgType));
}).findAny().orElse(method);
}
return method;
} | 3.26 |
flink_BaseMappingExtractor_putUniqueResultMappings_rdh | /**
* Result only template with explicit or extracted signatures.
*/
private void putUniqueResultMappings(Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappings, @Nullable FunctionResultTemplate uniqueResult, Set<FunctionSignatureTemplate> signatureOnly, Method method) {
if (uniqueResult == null) {
return;
}
// input-only templates are valid everywhere; if none exist, fall back to extraction
if (!signatureOnly.isEmpty()) {
signatureOnly.forEach(s ->
putMapping(collectedMappings, s, uniqueResult));
} else {
putMapping(collectedMappings, signatureExtraction.extract(this, method), uniqueResult);
}
} | 3.26 |
flink_BaseMappingExtractor_createParameterSignatureExtraction_rdh | // --------------------------------------------------------------------------------------------
// Context sensitive extraction and verification logic
// --------------------------------------------------------------------------------------------
/**
* Extraction that uses the method parameters for producing a {@link FunctionSignatureTemplate}.
*/
static SignatureExtraction createParameterSignatureExtraction(int offset) {
return (extractor, method) -> {
final List<FunctionArgumentTemplate> parameterTypes = extractArgumentTemplates(extractor.typeFactory, extractor.getFunctionClass(), method, offset);final String[] argumentNames = extractArgumentNames(method, offset);
return FunctionSignatureTemplate.of(parameterTypes, method.isVarArgs(), argumentNames);
};
} | 3.26 |
flink_BaseMappingExtractor_putExtractedResultMappings_rdh | /**
* Missing result by extraction with explicit or extracted signatures.
*/
private void putExtractedResultMappings(Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappings, Set<FunctionSignatureTemplate> inputOnly, ResultExtraction resultExtraction, Method method) {
if (!collectedMappings.isEmpty()) {
return;
}
final FunctionResultTemplate result = resultExtraction.extract(this, method);
// input-only templates are valid everywhere; if none exist, fall back to extraction
if (!inputOnly.isEmpty()) {
inputOnly.forEach(signature -> putMapping(collectedMappings, signature, result));
} else {
final FunctionSignatureTemplate signature = signatureExtraction.extract(this, method);
putMapping(collectedMappings, signature, result);
}
} | 3.26 |
flink_BaseMappingExtractor_putExplicitMappings_rdh | // --------------------------------------------------------------------------------------------
// Helper methods (ordered by invocation order)
// --------------------------------------------------------------------------------------------
/**
* Explicit mappings with complete signature to result declaration.
*/
private void putExplicitMappings(Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappings, Set<FunctionTemplate> explicitMappings, Set<FunctionSignatureTemplate> signatureOnly, Function<FunctionTemplate, FunctionResultTemplate> accessor) {
explicitMappings.forEach(t -> {
// signature templates are valid everywhere and are added to the explicit
// mapping
Stream.concat(signatureOnly.stream(), Stream.of(t.getSignatureTemplate())).forEach(v -> putMapping(collectedMappings, v, accessor.apply(t)));
});
} | 3.26 |
flink_BaseMappingExtractor_collectMethodMappings_rdh | /**
* Extracts mappings from signature to result (either accumulator or output) for the given
* method. It considers both global hints for the entire function and local hints just for this
* method.
*
* <p>The algorithm aims to find an input signature for every declared result. If no result is
* declared, it will be extracted. If no input signature is declared, it will be extracted.
*/
private Map<FunctionSignatureTemplate, FunctionResultTemplate> collectMethodMappings(Method method, Set<FunctionTemplate> global, Set<FunctionResultTemplate> globalResultOnly, ResultExtraction resultExtraction, Function<FunctionTemplate, FunctionResultTemplate> accessor) {
final Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappingsPerMethod = new LinkedHashMap<>();
final Set<FunctionTemplate> local = extractLocalFunctionTemplates(method);
final Set<FunctionResultTemplate> localResultOnly = findResultOnlyTemplates(local, accessor);
final Set<FunctionTemplate> explicitMappings = findResultMappingTemplates(global, local, accessor);
final FunctionResultTemplate resultOnly = findResultOnlyTemplate(globalResultOnly, localResultOnly, explicitMappings, accessor, getHintType());
final Set<FunctionSignatureTemplate> inputOnly = findInputOnlyTemplates(global, local, accessor);
// add all explicit mappings because they contain complete signatures
putExplicitMappings(collectedMappingsPerMethod, explicitMappings, inputOnly, accessor);
// add result only template with explicit or extracted signatures
putUniqueResultMappings(collectedMappingsPerMethod, resultOnly, inputOnly, method);
// handle missing result by extraction with explicit or extracted signatures
putExtractedResultMappings(collectedMappingsPerMethod, inputOnly, resultExtraction, method);
return collectedMappingsPerMethod;
} | 3.26 |
flink_SinkFunctionProvider_of_rdh | /**
* Helper method for creating a SinkFunction provider with a provided sink parallelism.
*/
static SinkFunctionProvider of(SinkFunction<RowData> sinkFunction, @Nullable Integer sinkParallelism) {
return new SinkFunctionProvider() {
@Override
public SinkFunction<RowData> createSinkFunction() {
return sinkFunction;
}
@Override
public Optional<Integer> getParallelism() {
return Optional.ofNullable(sinkParallelism);
}
};
} | 3.26 |
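A hedged sketch of creating such a provider with an explicit sink parallelism; the discarding sink function is illustrative only, and the package paths are assumptions based on the usual flink-table connector API.

```java
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.table.connector.sink.SinkFunctionProvider;
import org.apache.flink.table.data.RowData;

public class SinkProviderExample {
    public static void main(String[] args) {
        // trivial sink function that discards all rows, for illustration only
        SinkFunction<RowData> discardingSink = new SinkFunction<RowData>() {
            @Override
            public void invoke(RowData value, Context context) {
                // drop the record
            }
        };

        // request a fixed sink parallelism of 2; null would mean "use the default"
        SinkFunctionProvider provider = SinkFunctionProvider.of(discardingSink, 2);
        System.out.println(provider.getParallelism()); // Optional[2]
    }
}
```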
flink_LeadLagAggFunction_retractExpressions_rdh | // TODO hack: use the current input to reset the buffer value.
@Override
public Expression[] retractExpressions() {
return new Expression[]{ existDefaultValue ? cast(operand(2), typeLiteral(getResultType())) : literal(null, getResultType()) };
} | 3.26 |
flink_ClassDataTypeConverter_extractDataType_rdh | /**
* Returns the clearly identifiable data type if possible. For example, {@link Long} can be
* expressed as {@link DataTypes#BIGINT()}. However, for example, {@link Row} cannot be
* extracted as information about the fields is missing. Or {@link BigDecimal} needs to be
* mapped from a variable precision/scale to constant ones.
*/
@SuppressWarnings("unchecked") public static Optional<DataType> extractDataType(Class<?> clazz) {
// prefer BYTES over ARRAY<TINYINT> for byte[]
if (clazz == byte[].class) {
return Optional.of(DataTypes.BYTES());
}
if (clazz.isArray()) {
return extractDataType(clazz.getComponentType()).map(DataTypes::ARRAY);
}
if (TableSymbol.class.isAssignableFrom(clazz)) {
return Optional.of(new AtomicDataType(new SymbolType<>(), clazz));
}
return Optional.ofNullable(defaultDataTypes.get(clazz.getName()));
} | 3.26 |
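A hedged sketch of the mapping described above; the package path of `ClassDataTypeConverter` is assumed, and the printed values are approximate.

```java
import org.apache.flink.table.types.utils.ClassDataTypeConverter;
import org.apache.flink.types.Row;

public class ExtractDataTypeExample {
    public static void main(String[] args) {
        // Long maps to a clearly identifiable SQL type, e.g. Optional[BIGINT]
        System.out.println(ClassDataTypeConverter.extractDataType(Long.class));

        // byte[] is preferred as BYTES rather than ARRAY<TINYINT>
        System.out.println(ClassDataTypeConverter.extractDataType(byte[].class));

        // Row cannot be extracted because the field information is missing
        System.out.println(ClassDataTypeConverter.extractDataType(Row.class).isPresent()); // false
    }
}
```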
flink_HiveShowTableUtils_showCreateTable_rdh | /**
 * Constructs the string for the SHOW CREATE TABLE statement. Most of the logic is taken from Hive.
*/
public static String showCreateTable(ObjectPath tablePath, Table tbl) {
boolean needsLocation;
String showCreateTableString;
List<String> duplicateProps = new ArrayList<>();
needsLocation = doesTableNeedLocation(tbl);
if (tbl.isView()) {
showCreateTableString = (("CREATE VIEW `" + tablePath) + "` AS ") + tbl.getViewExpandedText();
} else {
StringBuilder createTabStringBuilder = new StringBuilder();
// For cases where the table is temporary
String tblTemp = "";
if (tbl.isTemporary()) {
duplicateProps.add("TEMPORARY");
tblTemp = "TEMPORARY ";
}
// For cases where the table is external
String tblExternal = "";
if (tbl.getTableType() == TableType.EXTERNAL_TABLE) {
duplicateProps.add("EXTERNAL");
tblExternal = "EXTERNAL ";
}
createTabStringBuilder.append(String.format("CREATE %s%sTABLE `%s`",
tblTemp, tblExternal, tablePath.getFullName()));
// Columns
String tblColumns;
List<FieldSchema> cols = tbl.getCols();
List<String> columns = new ArrayList<>();
for (FieldSchema col : cols) {
String columnDesc = ((" `" + col.getName()) + "` ") + col.getType();
if (col.getComment() != null) {
columnDesc = ((columnDesc + " COMMENT '") + HiveStringUtils.escapeHiveCommand(col.getComment())) + "'";
}
columns.add(columnDesc);
}
tblColumns = StringUtils.join(columns, ", \n");
createTabStringBuilder.append(String.format("(\n%s)\n", tblColumns));
// Table comment
String tblComment;
String tabComment = tbl.getProperty("comment");
if (tabComment != null) {
duplicateProps.add("comment");
tblComment = ("COMMENT '" + HiveStringUtils.escapeHiveCommand(tabComment)) + "'";
createTabStringBuilder.append(String.format("%s\n", tblComment));
}
// Partitions
String tblPartitions = "";
List<FieldSchema> partKeys = tbl.getPartitionKeys();
if (partKeys.size() > 0) {
tblPartitions += "PARTITIONED BY ( \n";
List<String> partCols = new ArrayList<>();
for (FieldSchema partKey : partKeys) {
String partColDesc = ((" `" + partKey.getName()) + "` ") + partKey.getType();
if (partKey.getComment() != null) {
partColDesc = ((partColDesc + " COMMENT '") + HiveStringUtils.escapeHiveCommand(partKey.getComment())) + "'";
}
partCols.add(partColDesc);
}
tblPartitions += StringUtils.join(partCols, ", \n");
tblPartitions += ")";
}
if (!tblPartitions.equals("")) {
createTabStringBuilder.append(String.format("%s\n", tblPartitions));
}
// Clusters (Buckets)
String tblSortBucket = "";
List<String> buckCols = tbl.getBucketCols();
if (buckCols.size() > 0) {
duplicateProps.add("SORTBUCKETCOLSPREFIX");
tblSortBucket += "CLUSTERED BY ( \n ";
tblSortBucket += StringUtils.join(buckCols, ", \n ");
tblSortBucket += ") \n";
List<Order> sortCols = tbl.getSortCols();
if (sortCols.size() > 0) {
tblSortBucket += "SORTED BY ( \n";
// Order
List<String> sortKeys = new ArrayList<String>();
for (Order sortCol : sortCols) {
String sortKeyDesc = (" " + sortCol.getCol()) + " ";
if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC) {
sortKeyDesc = sortKeyDesc + "ASC";
} else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
sortKeyDesc = sortKeyDesc + "DESC";
}
sortKeys.add(sortKeyDesc);
}
tblSortBucket += StringUtils.join(sortKeys, ", \n");
tblSortBucket += ") \n";
}
tblSortBucket += ("INTO " + tbl.getNumBuckets()) + " BUCKETS";
createTabStringBuilder.append(String.format("%s\n", tblSortBucket));
}
// Skewed Info
StringBuilder tblSkewedInfo = new StringBuilder();
SkewedInfo skewedInfo = tbl.getSkewedInfo();
if ((skewedInfo != null) && (!skewedInfo.getSkewedColNames().isEmpty())) {
tblSkewedInfo.append("SKEWED BY (").append(StringUtils.join(skewedInfo.getSkewedColNames(), ",")).append(")\n");
tblSkewedInfo.append(" ON (");
List<String> colValueList = new ArrayList<>();
for (List<String> colValues : skewedInfo.getSkewedColValues()) {
colValueList.add(("('" + StringUtils.join(colValues, "','")) + "')");
}
tblSkewedInfo.append(StringUtils.join(colValueList, ",")).append(")");
if (tbl.isStoredAsSubDirectories()) {
tblSkewedInfo.append("\n STORED AS DIRECTORIES");
}
createTabStringBuilder.append(String.format("%s\n", tblSkewedInfo));
}
// Row format (SerDe)
StringBuilder tblRowFormat = new StringBuilder();
StorageDescriptor sd = tbl.getTTable().getSd();
SerDeInfo serdeInfo = sd.getSerdeInfo();
Map<String, String> serdeParams = serdeInfo.getParameters();
tblRowFormat.append("ROW FORMAT SERDE \n");
tblRowFormat.append(" '").append(HiveStringUtils.escapeHiveCommand(serdeInfo.getSerializationLib())).append("' \n");
if (tbl.getStorageHandler() == null) {
// If serialization.format property has the default value, it will not to be
// included in SERDE properties
if (DEFAULT_SERIALIZATION_FORMAT.equals(serdeParams.get(serdeConstants.SERIALIZATION_FORMAT))) {
serdeParams.remove(serdeConstants.SERIALIZATION_FORMAT);
}
if (!serdeParams.isEmpty()) {
appendSerdeParams(tblRowFormat, serdeParams).append(" \n");
}
tblRowFormat.append("STORED AS INPUTFORMAT \n '").append(HiveStringUtils.escapeHiveCommand(sd.getInputFormat())).append("' \n");
tblRowFormat.append("OUTPUTFORMAT \n '").append(HiveStringUtils.escapeHiveCommand(sd.getOutputFormat())).append("'");
} else {
duplicateProps.add(META_TABLE_STORAGE);
tblRowFormat.append("STORED BY \n '").append(HiveStringUtils.escapeHiveCommand(tbl.getParameters().get(META_TABLE_STORAGE))).append("' \n");
// SerDe Properties
if (!serdeParams.isEmpty()) {
appendSerdeParams(tblRowFormat, serdeInfo.getParameters());
}
}
createTabStringBuilder.append(String.format("%s\n", tblRowFormat));// table location
if (needsLocation) {
String tblLocation = (" '" + HiveStringUtils.escapeHiveCommand(sd.getLocation())) + "'";
createTabStringBuilder.append(String.format("LOCATION\n%s\n", tblLocation));
}
// Table properties
duplicateProps.addAll(Arrays.stream(StatsSetupConst.TABLE_PARAMS_STATS_KEYS).collect(Collectors.toList()));
String v33 = propertiesToString(tbl.getParameters(), duplicateProps);
createTabStringBuilder.append(String.format("TBLPROPERTIES (\n%s)\n", v33));
showCreateTableString = createTabStringBuilder.toString();
}
return showCreateTableString;
} | 3.26 |
flink_WindowAssigner_getDefaultTrigger_rdh | /**
* Returns the default trigger associated with this {@code WindowAssigner}.
*
* <p>1. If you override {@code getDefaultTrigger()}, the {@code getDefaultTrigger()} will be
* invoked and the {@code getDefaultTrigger(StreamExecutionEnvironment env)} won't be invoked.
* 2. If you don't override {@code getDefaultTrigger()}, the {@code getDefaultTrigger(StreamExecutionEnvironment env)} will be invoked in the default
* implementation of the {@code getDefaultTrigger()}.
*/
public Trigger<T, W> getDefaultTrigger() {
return getDefaultTrigger(new StreamExecutionEnvironment());
}
/**
* Returns the default trigger associated with this {@code WindowAssigner}.
*
* @deprecated the method is deprecated since Flink 1.19 because {@code StreamExecutionEnvironment} is unused. Please use {@code getDefaultTrigger} and override
this method with an empty body instead. 1. If you override {@code getDefaultTrigger()},
the {@code getDefaultTrigger()} will be invoked and the {@code getDefaultTrigger(StreamExecutionEnvironment env)} won't be invoked. 2. If you don't
override {@code getDefaultTrigger()}, the {@code getDefaultTrigger(StreamExecutionEnvironment env)} will be invoked in the default
implementation of the {@code getDefaultTrigger()} | 3.26 |
flink_CoGroupOperatorBase_getGroupOrderForInputOne_rdh | /**
* Gets the order of elements within a group for the first input. If no such order has been set,
* this method returns null.
*
* @return The group order for the first input.
 */
public Ordering getGroupOrderForInputOne() {
return getGroupOrder(0);
} | 3.26 |
flink_CoGroupOperatorBase_setGroupOrderForInputTwo_rdh | /**
* Sets the order of the elements within a group for the second input.
*
* @param order
* The order for the elements in a group.
*/
public void setGroupOrderForInputTwo(Ordering order) {
setGroupOrder(1, order);
} | 3.26 |
flink_CoGroupOperatorBase_setGroupOrder_rdh | // --------------------------------------------------------------------------------------------
/**
* Sets the order of the elements within a group for the given input.
*
* @param inputNum
* The number of the input (here either <i>0</i> or <i>1</i>).
* @param order
* The order for the elements in a group.
*/
public void setGroupOrder(int inputNum, Ordering order) {
if (inputNum == 0) {
this.groupOrder1 = order;
} else if (inputNum == 1) {
this.groupOrder2 = order;
} else {
throw new IndexOutOfBoundsException();
}
} | 3.26 |
flink_CoGroupOperatorBase_setGroupOrderForInputOne_rdh | /**
* Sets the order of the elements within a group for the first input.
*
* @param order
* The order for the elements in a group.
*/
public void setGroupOrderForInputOne(Ordering order) {
setGroupOrder(0, order);
} | 3.26 |
flink_CoGroupOperatorBase_getGroupOrder_rdh | /**
* Gets the value order for an input, i.e. the order of elements within a group. If no such
* order has been set, this method returns null.
*
* @param inputNum
* The number of the input (here either <i>0</i> or <i>1</i>).
* @return The group order.
*/
public Ordering getGroupOrder(int inputNum) {
if (inputNum == 0) {
return this.groupOrder1;
} else if (inputNum == 1) {
return this.groupOrder2;
} else {
throw new IndexOutOfBoundsException();
}
} | 3.26 |
flink_CoGroupOperatorBase_executeOnCollections_rdh | // ------------------------------------------------------------------------
@Override
protected List<OUT> executeOnCollections(List<IN1> input1, List<IN2> input2, RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception {
// --------------------------------------------------------------------
// Setup
// --------------------------------------------------------------------
TypeInformation<IN1> inputType1 = getOperatorInfo().getFirstInputType();
TypeInformation<IN2> inputType2 = getOperatorInfo().getSecondInputType();
// for the grouping / merging comparator
int[] inputKeys1 = getKeyColumns(0);
int[] inputKeys2 = getKeyColumns(1);
boolean[] inputDirections1 = new boolean[inputKeys1.length];
boolean[] inputDirections2 = new boolean[inputKeys2.length];
Arrays.fill(inputDirections1, true);
Arrays.fill(inputDirections2, true);
final TypeSerializer<IN1> inputSerializer1 = inputType1.createSerializer(executionConfig);
final TypeSerializer<IN2> inputSerializer2 = inputType2.createSerializer(executionConfig);
final TypeComparator<IN1> inputComparator1 = getTypeComparator(executionConfig, inputType1, inputKeys1, inputDirections1);
final TypeComparator<IN2> inputComparator2 = getTypeComparator(executionConfig, inputType2, inputKeys2, inputDirections2);
final TypeComparator<IN1> inputSortComparator1;
final TypeComparator<IN2> inputSortComparator2;
if ((groupOrder1 == null) || (groupOrder1.getNumberOfFields() == 0)) {
// no group sorting
inputSortComparator1 = inputComparator1;
} else {
// group sorting
int[] groupSortKeys = groupOrder1.getFieldPositions();
int[] allSortKeys = new int[inputKeys1.length +
groupOrder1.getNumberOfFields()];
System.arraycopy(inputKeys1, 0, allSortKeys, 0, inputKeys1.length);
System.arraycopy(groupSortKeys, 0, allSortKeys, inputKeys1.length, groupSortKeys.length);
boolean[] groupSortDirections = groupOrder1.getFieldSortDirections();
boolean[] allSortDirections = new boolean[inputKeys1.length + groupSortKeys.length];
Arrays.fill(allSortDirections, 0, inputKeys1.length, true);
System.arraycopy(groupSortDirections, 0, allSortDirections, inputKeys1.length, groupSortDirections.length);
inputSortComparator1 = getTypeComparator(executionConfig, inputType1, allSortKeys, allSortDirections);
}
if ((groupOrder2 == null) || (groupOrder2.getNumberOfFields() == 0)) {
// no group sorting
inputSortComparator2 = inputComparator2;
} else {
// group sorting
int[] groupSortKeys = groupOrder2.getFieldPositions();
int[] allSortKeys = new int[inputKeys2.length + groupOrder2.getNumberOfFields()];
System.arraycopy(inputKeys2, 0, allSortKeys, 0, inputKeys2.length);
System.arraycopy(groupSortKeys, 0, allSortKeys, inputKeys2.length, groupSortKeys.length);
boolean[] groupSortDirections = groupOrder2.getFieldSortDirections();
boolean[] allSortDirections = new boolean[inputKeys2.length + groupSortKeys.length];
Arrays.fill(allSortDirections, 0, inputKeys2.length, true);
System.arraycopy(groupSortDirections, 0, allSortDirections, inputKeys2.length, groupSortDirections.length);
inputSortComparator2 = getTypeComparator(executionConfig, inputType2, allSortKeys, allSortDirections);
}
CoGroupSortListIterator<IN1, IN2> coGroupIterator = new CoGroupSortListIterator<IN1, IN2>(input1, inputSortComparator1, inputComparator1, inputSerializer1, input2, inputSortComparator2, inputComparator2, inputSerializer2);
// --------------------------------------------------------------------
// Run UDF
// --------------------------------------------------------------------
CoGroupFunction<IN1, IN2, OUT> function = userFunction.getUserCodeObject();
FunctionUtils.setFunctionRuntimeContext(function, ctx);
FunctionUtils.openFunction(function, DefaultOpenContext.INSTANCE);
List<OUT> result = new ArrayList<OUT>();
Collector<OUT> v23 = new CopyingListCollector<OUT>(result, getOperatorInfo().getOutputType().createSerializer(executionConfig));
while (coGroupIterator.next()) {
function.coGroup(coGroupIterator.getValues1(), coGroupIterator.getValues2(), v23);
}
FunctionUtils.closeFunction(function);
return result;
} | 3.26 |
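For illustration only (not part of the dataset row above): a minimal sketch of the kind of CoGroupFunction the loop above drives; for every key group it receives all matching records from both inputs. The class name, tuple types, and field semantics here are hypothetical.
import org.apache.flink.api.common.functions.CoGroupFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

public class CountingCoGroup
        implements CoGroupFunction<Tuple2<Long, String>, Tuple2<Long, Integer>, String> {

    @Override
    public void coGroup(
            Iterable<Tuple2<Long, String>> first,
            Iterable<Tuple2<Long, Integer>> second,
            Collector<String> out) {
        // Count the records of the current key on the second input ...
        int matches = 0;
        for (Tuple2<Long, Integer> ignored : second) {
            matches++;
        }
        // ... and emit one result per record of the first input.
        for (Tuple2<Long, String> left : first) {
            out.collect(left.f1 + " -> " + matches + " matching records");
        }
    }
}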
flink_CoGroupOperatorBase_isCombinableFirst_rdh | // --------------------------------------------------------------------------------------------
public boolean isCombinableFirst() {
return this.combinableFirst;
} | 3.26 |
flink_RequestQueue_m0_rdh | /**
* Closes this request queue.
*
* @see java.io.Closeable#close()
*/
@Override
public void m0() {
this.closed = true;
} | 3.26 |
flink_AbstractCheckpointMessage_hashCode_rdh | // --------------------------------------------------------------------------------------------
@Override
public int hashCode() {
return (job.hashCode() + f0.hashCode()) + ((int) (checkpointId ^ (checkpointId >>> 32)));
} | 3.26 |
flink_AbstractCheckpointMessage_m0_rdh | // --------------------------------------------------------------------------------------------
public JobID m0() {
return job;
} | 3.26 |
flink_ColumnarRowIterator_set_rdh | /**
 * Sets the number of rows in this batch and updates the position.
*/
public void set(final int num, final long offset, final long recordSkipCount) {
this.num = num;
this.pos = 0;
this.recordAndPosition.set(null, offset, recordSkipCount);
} | 3.26 |
flink_RichIterativeCondition_open_rdh | // --------------------------------------------------------------------------------------------
// Default life cycle methods
// --------------------------------------------------------------------------------------------
@Override
public void open(OpenContext openContext) throws Exception {
} | 3.26 |
flink_DelegationTokenReceiver_serviceConfigPrefix_rdh | /**
* Config prefix of the service.
*/
default String serviceConfigPrefix() {
return String.format("%s.%s", CONFIG_PREFIX, serviceName());
} | 3.26 |
flink_JobEdge_isBroadcast_rdh | /**
 * Gets whether the edge is a broadcast edge.
*/
public boolean isBroadcast() {
return isBroadcast;
} | 3.26 |
flink_JobEdge_getShipStrategyName_rdh | // --------------------------------------------------------------------------------------------
/**
* Gets the name of the ship strategy for the represented input, like "forward", "partition
* hash", "rebalance", "broadcast", ...
*
* @return The name of the ship strategy for the represented input, or null, if none was set.
*/
public String getShipStrategyName() {
return shipStrategyName;
} | 3.26 |
flink_JobEdge_getOperatorLevelCachingDescription_rdh | /**
* Gets the operator-level caching description for this input.
*
 * @return The description of operator-level caching, or null, if none was set.
*/
public String getOperatorLevelCachingDescription() {
return operatorLevelCachingDescription;
} | 3.26 |
flink_JobEdge_setPreProcessingOperationName_rdh | /**
* Sets the name of the pre-processing operation for this input.
*
* @param preProcessingOperationName
* The name of the pre-processing operation.
*/
public void setPreProcessingOperationName(String preProcessingOperationName) {
this.preProcessingOperationName = preProcessingOperationName;
} | 3.26 |
flink_JobEdge_isForward_rdh | /**
 * Gets whether the edge is a forward edge.
*/
public boolean isForward() {
return isForward;
} | 3.26 |
flink_JobEdge_setUpstreamSubtaskStateMapper_rdh | /**
* Sets the channel state rescaler used for rescaling persisted data on upstream side of this
* JobEdge.
*
* @param upstreamSubtaskStateMapper
* The channel state rescaler selector to use.
*/
public void setUpstreamSubtaskStateMapper(SubtaskStateMapper upstreamSubtaskStateMapper) {
this.upstreamSubtaskStateMapper = checkNotNull(upstreamSubtaskStateMapper);
} | 3.26 |
flink_JobEdge_m0_rdh | /**
* Returns the distribution pattern used for this edge.
*
* @return The distribution pattern used for this edge.
*/
public DistributionPattern m0() {
return this.distributionPattern;
} | 3.26 |
flink_JobEdge_setShipStrategyName_rdh | /**
* Sets the name of the ship strategy for the represented input.
*
* @param shipStrategyName
* The name of the ship strategy.
*/
public void setShipStrategyName(String shipStrategyName) {
this.shipStrategyName = shipStrategyName;
} | 3.26 |
flink_JobEdge_getUpstreamSubtaskStateMapper_rdh | /**
* Gets the channel state rescaler used for rescaling persisted data on upstream side of this
* JobEdge.
*
* @return The channel state rescaler to use, or null, if none was set.
*/
public SubtaskStateMapper getUpstreamSubtaskStateMapper() {
return upstreamSubtaskStateMapper;
} | 3.26 |
flink_JobEdge_getTarget_rdh | /**
* Returns the vertex connected to this edge.
*
* @return The vertex connected to this edge.
*/
public JobVertex getTarget() {
return target;
} | 3.26 |
flink_JobEdge_getSource_rdh | /**
* Returns the data set at the source of the edge. May be null, if the edge refers to the source
* via an ID and has not been connected.
*
* @return The data set at the source of the edge
*/
public IntermediateDataSet getSource() {
return source;
} | 3.26 |
flink_JobEdge_toString_rdh | // --------------------------------------------------------------------------------------------
@Override
public String toString() {
return String.format("%s --> %s [%s]", source.getId(), target, distributionPattern.name());
} | 3.26 |
flink_JobEdge_getPreProcessingOperationName_rdh | /**
 * Gets the name of the pre-processing operation for this input.
 *
 * @return The name of the pre-processing operation, or null, if none was set.
*/
public String getPreProcessingOperationName() {
return preProcessingOperationName;
} | 3.26 |
flink_JobEdge_setDownstreamSubtaskStateMapper_rdh | /**
* Sets the channel state rescaler used for rescaling persisted data on downstream side of this
* JobEdge.
*
* @param downstreamSubtaskStateMapper
* The channel state rescaler selector to use.
*/
public void setDownstreamSubtaskStateMapper(SubtaskStateMapper downstreamSubtaskStateMapper) {
this.downstreamSubtaskStateMapper = checkNotNull(downstreamSubtaskStateMapper);
} | 3.26 |
flink_JobEdge_setForward_rdh | /**
 * Sets whether the edge is a forward edge.
 */
public void setForward(boolean forward) {
isForward = forward;
} | 3.26 |
flink_JobEdge_setOperatorLevelCachingDescription_rdh | /**
* Sets the operator-level caching description for this input.
*
* @param operatorLevelCachingDescription
* The description of operator-level caching.
*/
public void setOperatorLevelCachingDescription(String operatorLevelCachingDescription) {
this.operatorLevelCachingDescription = operatorLevelCachingDescription;
} | 3.26 |
flink_ShowCreateUtil_buildShowCreateViewRow_rdh | /**
 * Builds a SHOW CREATE VIEW statement; only supported for views.
*/
public static String buildShowCreateViewRow(ResolvedCatalogBaseTable<?> view, ObjectIdentifier viewIdentifier, boolean isTemporary) {
if (view.getTableKind() != TableKind.VIEW) {
throw new TableException(String.format("SHOW CREATE VIEW is only supported for views, but %s is a table. Please use SHOW CREATE TABLE instead.", viewIdentifier.asSerializableString()));
}
StringBuilder stringBuilder = new StringBuilder();
if (view.getOrigin() instanceof QueryOperationCatalogView) {
throw new TableException("SHOW CREATE VIEW is not supported for views registered by Table API.");} else {
stringBuilder.append(String.format("CREATE %sVIEW %s%s as%s%s", isTemporary ? "TEMPORARY " : "", viewIdentifier.asSerializableString(), String.format("(%s)", extractFormattedColumnNames(view)), System.lineSeparator(), ((CatalogView) (view.getOrigin())).getExpandedQuery()));
}
extractFormattedComment(view).ifPresent(c -> stringBuilder.append(String.format(" COMMENT '%s'%s", c, System.lineSeparator())));
return stringBuilder.toString();
} | 3.26 |
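For illustration only (not part of the dataset row above): a hedged sketch of calling the builder and the rough shape of the string it returns; the identifiers, columns, and query are hypothetical, and the exact column formatting depends on extractFormattedColumnNames.
// resolvedView and viewId are hypothetical, previously resolved objects.
String ddl = ShowCreateUtil.buildShowCreateViewRow(resolvedView, viewId, true);
// Roughly:
//   CREATE TEMPORARY VIEW `my_catalog`.`my_db`.`my_view`(`id`, `name`) as
//   SELECT `id`, `name` FROM `my_catalog`.`my_db`.`orders`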
flink_MiniClusterConfiguration_getRpcServiceSharing_rdh | // ------------------------------------------------------------------------
// getters
// ------------------------------------------------------------------------
public RpcServiceSharing getRpcServiceSharing() {
return rpcServiceSharing;
} | 3.26 |
flink_CompletedCheckpointStatsSummary_getStateSizeStats_rdh | /**
* Returns the summary stats for the state size of completed checkpoints.
*
* @return Summary stats for the state size.
 */
public StatsSummary getStateSizeStats() {
return stateSize;
} | 3.26 |
flink_CompletedCheckpointStatsSummary_createSnapshot_rdh | /**
* Creates a snapshot of the current state.
*
* @return A snapshot of the current state.
*/
CompletedCheckpointStatsSummarySnapshot createSnapshot() {
return new CompletedCheckpointStatsSummarySnapshot(duration.createSnapshot(), processedData.createSnapshot(), persistedData.createSnapshot(), stateSize.createSnapshot(), checkpointedSize.createSnapshot());
} | 3.26 |
flink_SavepointWriter_removeOperator_rdh | /**
* Drop an existing operator from the savepoint.
*
* @param identifier
* The identifier of the operator.
* @return A modified savepoint.
 */
public SavepointWriter removeOperator(OperatorIdentifier identifier) {
metadata.removeOperator(identifier);
return this;
} | 3.26 |
flink_SavepointWriter_newSavepoint_rdh | /**
* Creates a new savepoint.
*
 * @param executionEnvironment
 * The execution environment that will be used to write the savepoint.
 * @param stateBackend
 * The state backend of the savepoint used for keyed state.
* @param maxParallelism
* The max parallelism of the savepoint.
* @return A {@link SavepointWriter}.
* @see #newSavepoint(StreamExecutionEnvironment, int)
*/
public static SavepointWriter newSavepoint(StreamExecutionEnvironment executionEnvironment, StateBackend stateBackend, int maxParallelism) {
return new SavepointWriter(createSavepointMetadata(maxParallelism), stateBackend, executionEnvironment);
} | 3.26 |
flink_SavepointWriter_m0_rdh | /**
*
* @deprecated use {@link #newSavepoint(StreamExecutionEnvironment, StateBackend, int)}
*/
@Deprecated
public static SavepointWriter m0(StateBackend stateBackend, int maxParallelism) {
return new SavepointWriter(createSavepointMetadata(maxParallelism), stateBackend, null);
} | 3.26 |
flink_SavepointWriter_fromExistingSavepoint_rdh | /**
* Loads an existing savepoint. Useful if you want to modify or extend the state of an existing
* application.
*
 * @param executionEnvironment
 * The execution environment that will be used to write the modified savepoint.
 * @param path
 * The path to an existing savepoint on disk.
* @param stateBackend
* The state backend of the savepoint.
* @return A {@link SavepointWriter}.
* @see #fromExistingSavepoint(String)
*/
public static SavepointWriter fromExistingSavepoint(StreamExecutionEnvironment executionEnvironment, String path, StateBackend stateBackend) throws IOException {
return new SavepointWriter(readSavepointMetadata(path), stateBackend, executionEnvironment);
} | 3.26 |
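For illustration only (not part of the dataset rows above): a minimal end-to-end sketch combining the factory and mutator methods shown in these rows; the paths, operator uid, state backend choice, and the write(...)/execute() calls are assumptions about the surrounding API rather than part of the snippets.
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.state.api.OperatorIdentifier;
import org.apache.flink.state.api.SavepointWriter;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ModifySavepointExample {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Load an existing savepoint (path and state backend are placeholders).
        SavepointWriter writer = SavepointWriter.fromExistingSavepoint(
                env, "hdfs:///savepoints/sp-1", new HashMapStateBackend());
        // Drop one operator's state and write everything else to a new location.
        writer.removeOperator(OperatorIdentifier.forUid("obsolete-operator"))
                .write("hdfs:///savepoints/sp-1-modified");
        env.execute("modify savepoint");
    }
}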