conflict_resolution (string column, lengths 27 to 16k characters)
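Each row below is a single sample from this column; row contents appear to have been flattened onto one line. A row concatenates one or more Git conflict regions with the resolution that was committed, following the marker pattern:

<<<<<<< ours ======= theirs >>>>>>> resolution

All samples are Java source, and several rows carry multiple conflict regions from the same file.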
<<<<<<<
import org.apache.hadoop.hbase.Cell;
=======
import org.apache.hadoop.hbase.HConstants;
>>>>>>>
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Cell;
<<<<<<< import org.apache.phoenix.util.ConfigUtil; import org.apache.phoenix.util.JDBCUtil; ======= >>>>>>> import org.apache.phoenix.util.ConfigUtil; import org.apache.phoenix.util.JDBCUtil; <<<<<<< import com.google.protobuf.HBaseZeroCopyByteString; ======= import com.google.common.io.Closeables; >>>>>>> import com.google.common.io.Closeables; import com.google.protobuf.HBaseZeroCopyByteString; <<<<<<< ======= try { this.connection = HBaseFactoryProvider.getHConnectionFactory().createConnection(this.config); } catch (ZooKeeperConnectionException e) { throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION) .setRootCause(e).build().buildException(); } if (this.connection.isClosed()) { // TODO: why the heck doesn't this throw above? throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION).build().buildException(); } this.latestMetaData = newEmptyMetaData(); >>>>>>> try { this.connection = HBaseFactoryProvider.getHConnectionFactory().createConnection(this.config); } catch (IOException e) { throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION) .setRootCause(e).build().buildException(); } if (this.connection.isClosed()) { // TODO: why the heck doesn't this throw above? throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION).build().buildException(); } this.latestMetaData = newEmptyMetaData(); <<<<<<< HTableDescriptor descriptor = (existingDesc != null) ? new HTableDescriptor(existingDesc) : new HTableDescriptor(TableName.valueOf(tableName)); ======= String defaultFamilyName = (String)tableProps.remove(PhoenixDatabaseMetaData.DEFAULT_COLUMN_FAMILY_NAME); HTableDescriptor descriptor = (existingDesc != null) ? new HTableDescriptor(existingDesc) : new HTableDescriptor(tableName); >>>>>>> String defaultFamilyName = (String)tableProps.remove(PhoenixDatabaseMetaData.DEFAULT_COLUMN_FAMILY_NAME); HTableDescriptor descriptor = (existingDesc != null) ? new HTableDescriptor(existingDesc) : new HTableDescriptor(TableName.valueOf(tableName)); <<<<<<< HTableInterface ht = this.getTable(PhoenixDatabaseMetaData.TYPE_TABLE_NAME_BYTES); final Map<byte[], Long> results = ht.coprocessorService(MetaDataService.class, null, null, new Batch.Call<MetaDataService,Long>() { ======= final TreeMap<byte[],Long> results = Maps.newTreeMap(Bytes.BYTES_COMPARATOR); connection.processExecs(MetaDataProtocol.class, regionKeys, PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES, this.getDelegate().getExecutor(), new Batch.Call<MetaDataProtocol,Long>() { >>>>>>> HTableInterface ht = this.getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES); final Map<byte[], Long> results = ht.coprocessorService(MetaDataService.class, null, null, new Batch.Call<MetaDataService,Long>() { <<<<<<< if (retried) { connection.relocateRegion( TableName.valueOf(PhoenixDatabaseMetaData.TYPE_TABLE_NAME_BYTES), tableKey); } HTableInterface ht = this.getTable(PhoenixDatabaseMetaData.TYPE_TABLE_NAME_BYTES); final Map<byte[], MetaDataResponse> results = ht.coprocessorService(MetaDataService.class, tableKey, tableKey, callable); ======= HRegionLocation regionLocation = retried ? 
connection.relocateRegion(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES, tableKey) : connection.locateRegion(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES, tableKey); List<byte[]> regionKeys = Collections.singletonList(regionLocation.getRegionInfo().getStartKey()); final Map<byte[],MetaDataMutationResult> results = Maps.newHashMapWithExpectedSize(1); connection.processExecs(MetaDataProtocol.class, regionKeys, PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES, this.getDelegate().getExecutor(), callable, new Batch.Callback<MetaDataMutationResult>(){ @Override public void update(byte[] region, byte[] row, MetaDataMutationResult value) { results.put(region, value); } }); >>>>>>> if (retried) { connection.relocateRegion( TableName.valueOf(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES), tableKey); } HTableInterface ht = this.getTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES); final Map<byte[], MetaDataResponse> results = ht.coprocessorService(MetaDataService.class, tableKey, tableKey, callable); <<<<<<< new Batch.Call<MetaDataService, MetaDataResponse>() { @Override public MetaDataResponse call(MetaDataService instance) throws IOException { ServerRpcController controller = new ServerRpcController(); BlockingRpcCallback<MetaDataResponse> rpcCallback = new BlockingRpcCallback<MetaDataResponse>(); GetTableRequest.Builder builder = GetTableRequest.newBuilder(); builder.setTenantId(HBaseZeroCopyByteString.wrap(nonNullTenantId)); builder.setSchemaName(HBaseZeroCopyByteString.wrap(schemaBytes)); builder.setTableName(HBaseZeroCopyByteString.wrap(tableBytes)); builder.setTableTimestamp(tableTimestamp); builder.setClientTimestamp(clientTimestamp); instance.getTable(controller, builder.build(), rpcCallback); if(controller.getFailedOn() != null) { throw controller.getFailedOn(); } return rpcCallback.get(); } }); ======= new Batch.Call<MetaDataProtocol, MetaDataMutationResult>() { @Override public MetaDataMutationResult call(MetaDataProtocol instance) throws IOException { return instance.getTable(tenantIdBytes, schemaBytes, tableBytes, tableTimestamp, clientTimestamp); } }); >>>>>>> new Batch.Call<MetaDataService, MetaDataResponse>() { @Override public MetaDataResponse call(MetaDataService instance) throws IOException { ServerRpcController controller = new ServerRpcController(); BlockingRpcCallback<MetaDataResponse> rpcCallback = new BlockingRpcCallback<MetaDataResponse>(); GetTableRequest.Builder builder = GetTableRequest.newBuilder(); builder.setTenantId(HBaseZeroCopyByteString.wrap(tenantIdBytes)); builder.setSchemaName(HBaseZeroCopyByteString.wrap(schemaBytes)); builder.setTableName(HBaseZeroCopyByteString.wrap(tableBytes)); builder.setTableTimestamp(tableTimestamp); builder.setClientTimestamp(clientTimestamp); instance.getTable(controller, builder.build(), rpcCallback); if(controller.getFailedOn() != null) { throw controller.getFailedOn(); } return rpcCallback.get(); } });
<<<<<<<
import org.apache.hadoop.hbase.util.Writables;
=======
>>>>>>>
<<<<<<< if (result.rawCells().length == 1) { Cell errorKV = result.rawCells()[0]; int errorCode = PDataType.INTEGER.getCodec().decodeInt(errorKV.getValueArray(), errorKV.getValueOffset(), null); ======= if (result.raw().length == 1) { KeyValue errorKV = result.raw()[0]; int errorCode = PDataType.INTEGER.getCodec().decodeInt(errorKV.getBuffer(), errorKV.getValueOffset(), SortOrder.getDefault()); >>>>>>> if (result.rawCells().length == 1) { Cell errorKV = result.rawCells()[0]; int errorCode = PDataType.INTEGER.getCodec().decodeInt(errorKV.getValueArray(), errorKV.getValueOffset(), SortOrder.getDefault()); <<<<<<< nextValue = PDataType.LONG.getCodec().decodeLong(currentValueKV.getValueArray(), currentValueKV.getValueOffset(), null); incrementBy = PDataType.LONG.getCodec().decodeLong(incrementByKV.getValueArray(), incrementByKV.getValueOffset(), null); cacheSize = PDataType.INTEGER.getCodec().decodeInt(cacheSizeKV.getValueArray(), cacheSizeKV.getValueOffset(), null); ======= nextValue = PDataType.LONG.getCodec().decodeLong(currentValueKV.getBuffer(), currentValueKV.getValueOffset(), SortOrder.getDefault()); incrementBy = PDataType.LONG.getCodec().decodeLong(incrementByKV.getBuffer(), incrementByKV.getValueOffset(), SortOrder.getDefault()); cacheSize = PDataType.INTEGER.getCodec().decodeInt(cacheSizeKV.getBuffer(), cacheSizeKV.getValueOffset(), SortOrder.getDefault()); >>>>>>> nextValue = PDataType.LONG.getCodec().decodeLong(currentValueKV.getValueArray(), currentValueKV.getValueOffset(), SortOrder.getDefault()); incrementBy = PDataType.LONG.getCodec().decodeLong(incrementByKV.getValueArray(), incrementByKV.getValueOffset(), SortOrder.getDefault()); cacheSize = PDataType.INTEGER.getCodec().decodeInt(cacheSizeKV.getValueArray(), cacheSizeKV.getValueOffset(), SortOrder.getDefault()); <<<<<<< int statusCode = PDataType.INTEGER.getCodec().decodeInt(statusKV.getValueArray(), statusKV.getValueOffset(), null); ======= int statusCode = PDataType.INTEGER.getCodec().decodeInt(statusKV.getBuffer(), statusKV.getValueOffset(), SortOrder.getDefault()); >>>>>>> int statusCode = PDataType.INTEGER.getCodec().decodeInt(statusKV.getValueArray(), statusKV.getValueOffset(), SortOrder.getDefault()); <<<<<<< int statusCode = PDataType.INTEGER.getCodec().decodeInt(statusKV.getValueArray(), statusKV.getValueOffset(), null); ======= int statusCode = PDataType.INTEGER.getCodec().decodeInt(statusKV.getBuffer(), statusKV.getValueOffset(), SortOrder.getDefault()); >>>>>>> int statusCode = PDataType.INTEGER.getCodec().decodeInt(statusKV.getValueArray(), statusKV.getValueOffset(), SortOrder.getDefault()); <<<<<<< int statusCode = PDataType.INTEGER.getCodec().decodeInt(statusKV.getValueArray(), statusKV.getValueOffset(), null); ======= int statusCode = PDataType.INTEGER.getCodec().decodeInt(statusKV.getBuffer(), statusKV.getValueOffset(), SortOrder.getDefault()); >>>>>>> int statusCode = PDataType.INTEGER.getCodec().decodeInt(statusKV.getValueArray(), statusKV.getValueOffset(), SortOrder.getDefault());
<<<<<<< import org.apache.phoenix.client.GenericKeyValueBuilder; import org.apache.phoenix.coprocessor.generated.MetaDataProtos; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest; ======= import org.apache.phoenix.hbase.index.util.GenericKeyValueBuilder; >>>>>>> import org.apache.phoenix.coprocessor.generated.MetaDataProtos; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse; import org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest; <<<<<<< Cell typeKeyValue = KeyValueUtil.getColumnLatest(results, PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.TABLE_TYPE_BYTES); assert (typeKeyValue != null && typeKeyValue.getValueLength() == 1); if (tableType != PTableType.fromSerializedValue(typeKeyValue.getValueArray()[typeKeyValue .getValueOffset()])) { ======= KeyValue typeKeyValue = KeyValueUtil.getColumnLatest(GenericKeyValueBuilder.INSTANCE, results, PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.TABLE_TYPE_BYTES); assert(typeKeyValue != null && typeKeyValue.getValueLength() == 1); if ( tableType != PTableType.fromSerializedValue(typeKeyValue.getBuffer()[typeKeyValue.getValueOffset()])) { >>>>>>> Cell typeKeyValue = KeyValueUtil.getColumnLatest(GenericKeyValueBuilder.INSTANCE, results, PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.TABLE_TYPE_BYTES); assert (typeKeyValue != null && typeKeyValue.getValueLength() == 1); if (tableType != PTableType.fromSerializedValue(typeKeyValue.getValueArray()[typeKeyValue .getValueOffset()])) {
<<<<<<<
public Cell getValue(byte[] family, byte[] qualifier) {
    return KeyValueUtil.getColumnLatest(values, family, qualifier);
=======
public KeyValue getValue(byte[] family, byte[] qualifier) {
    return KeyValueUtil.getColumnLatest(GenericKeyValueBuilder.INSTANCE, values, family, qualifier);
>>>>>>>
public Cell getValue(byte[] family, byte[] qualifier) {
    return KeyValueUtil.getColumnLatest(GenericKeyValueBuilder.INSTANCE, values, family, qualifier);
<<<<<<<
public Put buildUpdateMutation(ValueGetter valueGetter, ImmutableBytesWritable dataRowKeyPtr, long ts) throws IOException {
=======
@SuppressWarnings("deprecation")
public Put buildUpdateMutation(KeyValueBuilder kvBuilder, ValueGetter valueGetter, ImmutableBytesWritable dataRowKeyPtr, long ts) throws IOException {
>>>>>>>
public Put buildUpdateMutation(KeyValueBuilder kvBuilder, ValueGetter valueGetter, ImmutableBytesWritable dataRowKeyPtr, long ts) throws IOException {
<<<<<<< ======= import android.util.Log; import android.widget.Button; >>>>>>> <<<<<<< ======= Log.d(TAG, "onCreate: Rotatable 2 has been called!"); // rotatable3 = new Rotatable(Color.parseColor("#123456"), 1000, "4", "5", "6"); // rotatable3.setSize(25); // rotatable3.setTypeface(typeface); // rotatable3.setInterpolator(new DecelerateInterpolator()); // rotatable3.setAnimationDuration(500); // rotatingTextWrapper.setContent("?abc ? abc", rotatable, rotatable2); >>>>>>> Log.d(TAG, "onCreate: Rotatable 2 has been called!"); // rotatable3 = new Rotatable(Color.parseColor("#123456"), 1000, "4", "5", "6"); // rotatable3.setSize(25); // rotatable3.setTypeface(typeface); // rotatable3.setInterpolator(new DecelerateInterpolator()); // rotatable3.setAnimationDuration(500); // rotatingTextWrapper.setContent("?abc ? abc", rotatable, rotatable2);
<<<<<<< private String currentText = ""; ======= String currentText = ""; String oldText = ""; boolean animationRunning=false; >>>>>>> private String currentText = ""; private String oldText = ""; boolean animationRunning=false; <<<<<<< ======= // Log.i("knock", aLong + ""); >>>>>>> <<<<<<< public boolean isPaused() { return isPaused; } ======= >>>>>>> public boolean isPaused() { return isPaused; }
<<<<<<<
protected MaintainDBService maintainDBService;
=======
>>>>>>>
<<<<<<< // TODO: add put ======= /** * This endpoint delete a attribute that matches the given id. * * @param id an attribute identifier * @return status 200 if ok or 404 if id not found/invalid * @throws DMPControllerException */ @ApiOperation(value = "delete attribute that matches the given id", notes = "Returns status 200 or 404.") @DELETE @Path("/{id}") @Override public Response deleteObject(@ApiParam(value = "attribute identifier", required = true) @PathParam("id") final String id) throws DMPControllerException { return super.deleteObject(id); } >>>>>>> // TODO: add put /** * This endpoint delete a attribute that matches the given id. * * @param id an attribute identifier * @return status 200 if ok or 404 if id not found/invalid * @throws DMPControllerException */ @ApiOperation(value = "delete attribute that matches the given id", notes = "Returns status 200 or 404.") @DELETE @Path("/{id}") @Override public Response deleteObject(@ApiParam(value = "attribute identifier", required = true) @PathParam("id") final String id) throws DMPControllerException { return super.deleteObject(id); }
<<<<<<< createSchemaDataModel(bibrmContractDM, bibrmContractDM, bibrmContractSchema); createSchemaDataModel(biboDocumentDM, biboDocumentDM, biboDocumentSchema); createSchemaDataModel(mabxmlSchemaDM, mabxmlSchemaDM, mabxmlSchema); createSchemaDataModel(pnxSchemaDM, pnxSchemaDM, pnxSchema); createSchemaDataModel(foafPersonDM, foafPersonDM, foafPersonSchema); ======= createSchemaDataModel(DataModelUtils.BIBRM_CONTRACT_DATA_MODEL_UUID, bibrmContractDM, bibrmContractDM, bibrmContractSchema); createSchemaDataModel(DataModelUtils.BIBO_DOCUMENT_DATA_MODEL_UUID, biboDocumentDM, biboDocumentDM, biboDocumentSchema); createSchemaDataModel(DataModelUtils.MABXML_DATA_MODEL_UUID, mabxmlSchemaDM, mabxmlSchemaDM, mabxmlSchema); createSchemaDataModel(DataModelUtils.FOAF_PERSON_DATA_MODEL_UUID, foafPersonDM, foafPersonDM, foafPersonSchema); >>>>>>> createSchemaDataModel(DataModelUtils.BIBRM_CONTRACT_DATA_MODEL_UUID, bibrmContractDM, bibrmContractDM, bibrmContractSchema); createSchemaDataModel(DataModelUtils.BIBO_DOCUMENT_DATA_MODEL_UUID, biboDocumentDM, biboDocumentDM, biboDocumentSchema); createSchemaDataModel(DataModelUtils.MABXML_DATA_MODEL_UUID, mabxmlSchemaDM, mabxmlSchemaDM, mabxmlSchema); createSchemaDataModel(pnxSchemaDM, pnxSchemaDM, pnxSchema); createSchemaDataModel(DataModelUtils.FOAF_PERSON_DATA_MODEL_UUID, foafPersonDM, foafPersonDM, foafPersonSchema);
<<<<<<<
import de.avgl.dmp.persistence.DMPPersistenceException;
import de.avgl.dmp.persistence.model.schema.Attribute;
=======
import de.avgl.dmp.controller.resources.utils.ResourceUtilsFactory;
>>>>>>>
import de.avgl.dmp.controller.resources.utils.ResourceUtilsFactory;
import de.avgl.dmp.persistence.DMPPersistenceException;
import de.avgl.dmp.persistence.model.schema.Attribute;
<<<<<<< ======= import com.google.common.base.Preconditions; import org.apache.calcite.sql.JoinType; import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; import org.apache.flink.api.java.typeutils.RowTypeInfo; >>>>>>> import com.google.common.base.Preconditions; import org.apache.calcite.sql.JoinType; import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; import org.apache.flink.api.java.typeutils.RowTypeInfo; <<<<<<< import org.apache.calcite.sql.*; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.table.runtime.typeutils.RowDataTypeInfo; ======= import org.apache.flink.table.runtime.typeutils.BaseRowTypeInfo; >>>>>>> import org.apache.flink.table.runtime.typeutils.RowDataTypeInfo; <<<<<<< protected String[] lookupKeys; public BaseSideInfo(AbstractSideTableInfo sideTableInfo, String[] lookupKeys) { this.lookupKeys = lookupKeys; this.sideTableInfo = sideTableInfo; buildEqualInfo(sideTableInfo); } ======= protected JoinInfo joinInfo; >>>>>>> protected JoinInfo joinInfo; protected String[] lookupKeys; public BaseSideInfo(AbstractSideTableInfo sideTableInfo, String[] lookupKeys) { this.lookupKeys = lookupKeys; this.sideTableInfo = sideTableInfo; buildEqualInfo(sideTableInfo); } <<<<<<< public void dealOneEqualCon(SqlNode sqlNode, String sideTableName) { if (!SqlKind.COMPARISON.contains(sqlNode.getKind())) { ======= public void dealOneEqualCon(SqlNode sqlNode, String sideTableName) { if (!SqlKind.COMPARISON.contains(sqlNode.getKind())) { >>>>>>> public void dealOneEqualCon(SqlNode sqlNode, String sideTableName) { if (!SqlKind.COMPARISON.contains(sqlNode.getKind())) { <<<<<<< SqlIdentifier left = (SqlIdentifier) ((SqlBasicCall) sqlNode).getOperands()[0]; SqlIdentifier right = (SqlIdentifier) ((SqlBasicCall) sqlNode).getOperands()[1]; ======= SqlNode leftNode = ((SqlBasicCall) sqlNode).getOperands()[0]; SqlNode rightNode = ((SqlBasicCall) sqlNode).getOperands()[1]; if (leftNode.getKind() == SqlKind.LITERAL) { evalConstantEquation( (SqlLiteral) leftNode, (SqlIdentifier) rightNode ); } else if (rightNode.getKind() == SqlKind.LITERAL) { evalConstantEquation( (SqlLiteral) rightNode, (SqlIdentifier) leftNode ); } else { SqlIdentifier left = (SqlIdentifier) leftNode; SqlIdentifier right = (SqlIdentifier) rightNode; evalEquation(left, right, sideTableName, sqlNode); } } >>>>>>> SqlNode leftNode = ((SqlBasicCall) sqlNode).getOperands()[0]; SqlNode rightNode = ((SqlBasicCall) sqlNode).getOperands()[1]; if (leftNode.getKind() == SqlKind.LITERAL) { evalConstantEquation( (SqlLiteral) leftNode, (SqlIdentifier) rightNode ); } else if (rightNode.getKind() == SqlKind.LITERAL) { evalConstantEquation( (SqlLiteral) rightNode, (SqlIdentifier) leftNode ); } else { SqlIdentifier left = (SqlIdentifier) leftNode; SqlIdentifier right = (SqlIdentifier) rightNode; evalEquation(left, right, sideTableName, sqlNode); } }
<<<<<<<
//Create an executer with 2 reusable threads
final ExecutorService executor = Executors.newFixedThreadPool(2);
// Create a context for media driver connection
=======
>>>>>>>
//Create an executer with 2 reusable threads
final ExecutorService executor = Executors.newFixedThreadPool(2);
// Create a context for media driver connection
<<<<<<<
// Create a rate reporter to run every seconds
final RateReporter reporter = new RateReporter(TimeUnit.SECONDS.toNanos(1), StreamingPublisher::printRate);
executor.execute(reporter);
// Create a barrier which will ask to restart publisher after program's termination
=======
>>>>>>>
// Create a barrier which will ask to restart publisher after program's termination
<<<<<<<
for (String key : colNames) {
=======
for(String key : colNames){
    if (ROWKEY.equalsIgnoreCase(key)) {
        sideVal.add(rowKeyStr);
        continue;
    }
>>>>>>>
for (String key : colNames) {
    if (ROWKEY.equalsIgnoreCase(key)) {
        sideVal.add(rowKeyStr);
        continue;
    }
<<<<<<< import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl; ======= >>>>>>> import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl; <<<<<<< import java.util.Map; ======= import java.util.Arrays; >>>>>>> import java.util.Map; import java.util.Arrays; <<<<<<< if (sinkFieldNames.length != queryFieldNames.length) { throw new ValidationException( "Field name of query result and registered TableSink " + targetTableName + " do not match.\n" + "Query result schema: " + String.join(",", queryFieldNames) + "\n" + "TableSink schema: " + String.join(",", sinkFieldNames)); } Table newTable; ======= Table newTable; >>>>>>> Table newTable;
<<<<<<< import static java.util.stream.Collectors.toList; import static uk.co.real_logic.aeron.common.ErrorCode.GENERIC_ERROR; import static uk.co.real_logic.aeron.common.ErrorCode.INVALID_CHANNEL; import static uk.co.real_logic.aeron.common.ErrorCode.UNKNOWN_PUBLICATION; import static uk.co.real_logic.aeron.common.ErrorCode.UNKNOWN_SUBSCRIPTION; import static uk.co.real_logic.aeron.common.command.ControlProtocolEvents.ADD_PUBLICATION; import static uk.co.real_logic.aeron.common.command.ControlProtocolEvents.ADD_SUBSCRIPTION; import static uk.co.real_logic.aeron.common.command.ControlProtocolEvents.CLIENT_KEEPALIVE; import static uk.co.real_logic.aeron.common.command.ControlProtocolEvents.REMOVE_PUBLICATION; import static uk.co.real_logic.aeron.common.command.ControlProtocolEvents.REMOVE_SUBSCRIPTION; import static uk.co.real_logic.aeron.driver.Configuration.CONNECTION_LIVENESS_TIMEOUT_NS; import static uk.co.real_logic.aeron.driver.Configuration.RETRANS_UNICAST_DELAY_DEFAULT_NS; import static uk.co.real_logic.aeron.driver.Configuration.RETRANS_UNICAST_LINGER_DEFAULT_NS; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Supplier; import uk.co.real_logic.aeron.common.FeedbackDelayGenerator; ======= >>>>>>> <<<<<<< ======= import java.net.InetSocketAddress; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Supplier; import static java.util.stream.Collectors.toList; import static uk.co.real_logic.aeron.common.ErrorCode.*; import static uk.co.real_logic.aeron.common.command.ControlProtocolEvents.*; import static uk.co.real_logic.aeron.common.event.EventConfiguration.EVENT_READER_FRAME_LIMIT; import static uk.co.real_logic.aeron.driver.Configuration.*; import static uk.co.real_logic.aeron.driver.MediaDriver.Context; >>>>>>> import java.net.InetSocketAddress; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Supplier; import static java.util.stream.Collectors.toList; import static uk.co.real_logic.aeron.common.ErrorCode.*; import static uk.co.real_logic.aeron.common.command.ControlProtocolEvents.*; import static uk.co.real_logic.aeron.common.event.EventConfiguration.EVENT_READER_FRAME_LIMIT; import static uk.co.real_logic.aeron.driver.Configuration.*; import static uk.co.real_logic.aeron.driver.MediaDriver.Context; <<<<<<< public static final int HEARTBEAT_TIMEOUT_MS = 1000; // how often to check liveness & cleanup /** * Unicast NAK delay is immediate initial with delayed subsequent delay */ public static final StaticDelayGenerator NAK_UNICAST_DELAY_GENERATOR = new StaticDelayGenerator( Configuration.NAK_UNICAST_DELAY_DEFAULT_NS, true); public static final StaticDelayGenerator NO_NAK_DELAY_GENERATOR = new StaticDelayGenerator( -1, false); public static final OptimalMulticastDelayGenerator NAK_MULTICAST_DELAY_GENERATOR = new OptimalMulticastDelayGenerator( Configuration.NAK_MAX_BACKOFF_DEFAULT, Configuration.NAK_GROUPSIZE_DEFAULT, Configuration.NAK_GRTT_DEFAULT); /** * Source uses same for unicast and multicast. For ticks. 
*/ public static final FeedbackDelayGenerator RETRANS_UNICAST_DELAY_GENERATOR = () -> RETRANS_UNICAST_DELAY_DEFAULT_NS; public static final FeedbackDelayGenerator RETRANS_UNICAST_LINGER_GENERATOR = () -> RETRANS_UNICAST_LINGER_DEFAULT_NS; ======= private final long dataLossSeed; private final long controlLossSeed; private final double dataLossRate; private final double controlLossRate; private final int mtuLength; private final int termBufferLength; private final int initialWindowLength; >>>>>>> private final long dataLossSeed; private final long controlLossSeed; private final double dataLossRate; private final double controlLossRate; private final int mtuLength; private final int termBufferLength; private final int initialWindowLength; <<<<<<< public void onCreateConnection( final int sessionId, final int streamId, final int initialTermId, final int activeTermId, final int initialTermOffset, final int termBufferLength, final int senderMtuLength, final InetSocketAddress controlAddress, final InetSocketAddress sourceAddress, final ReceiveChannelEndpoint channelEndpoint) { channelEndpoint.validateSenderMtuLength(senderMtuLength); channelEndpoint.validateWindowMaxLength(initialWindowLength); final UdpChannel udpChannel = channelEndpoint.udpChannel(); final String channel = udpChannel.originalUriString(); final long correlationId = generateCreationCorrelationId(); final RawLog rawLog = rawLogFactory.newConnection( udpChannel.canonicalForm(), sessionId, streamId, correlationId, termBufferLength); final long joiningPosition = LogBufferDescriptor.computePosition( activeTermId, initialTermOffset, Integer.numberOfTrailingZeros(termBufferLength), initialTermId); final List<SubscriberPosition> subscriberPositions = subscriptions .stream() .filter((subscription) -> subscription.matches(streamId, channelEndpoint)) .map( (subscription) -> { final int positionCounterId = allocatePositionCounter( "subscriber pos", channel, sessionId, streamId, subscription.registrationId()); final UnsafeBufferPosition position = new UnsafeBufferPosition( countersBuffer, positionCounterId, countersManager); countersManager.setCounterValue(positionCounterId, joiningPosition); return new SubscriberPosition(subscription, position); }) .collect(toList()); final int receiverHwmCounterId = allocatePositionCounter("receiver hwm", channel, sessionId, streamId, correlationId); final String sourceInfo = generateSourceInfo(sourceAddress); clientProxy.onConnectionReady( channel, streamId, sessionId, joiningPosition, rawLog, correlationId, subscriberPositions, sourceInfo); final DriverConnection connection = new DriverConnection( correlationId, channelEndpoint, controlAddress, sessionId, streamId, initialTermId, activeTermId, initialTermOffset, initialWindowLength, rawLog, timerWheel, Configuration.dontSendNack() ? NO_NAK_DELAY_GENERATOR : udpChannel.isMulticast() ? NAK_MULTICAST_DELAY_GENERATOR : NAK_UNICAST_DELAY_GENERATOR, subscriberPositions.stream().map(SubscriberPosition::position).collect(toList()), new UnsafeBufferPosition(countersBuffer, receiverHwmCounterId, countersManager), clock, systemCounters, sourceAddress, logger); connections.add(connection); subscriberPositions.forEach( (subscriberPosition) -> subscriberPosition.subscription().addConnection(connection, subscriberPosition.position())); receiverProxy.newConnection(channelEndpoint, connection); } ======= >>>>>>>
<<<<<<< public void handleAsyncInvoke(Map<String, Object> inputParams, CRow input, ResultFuture<CRow> resultFuture) throws Exception { String key = buildCacheKey(inputParams); Map<String, String> keyValue = Maps.newHashMap(); List<String> value = async.keys(key + ":*").get(); String[] values = value.toArray(new String[value.size()]); if (values.length == 0) { dealMissKey(input, resultFuture); } else { RedisFuture<List<KeyValue<String, String>>> future = ((RedisStringAsyncCommands) async).mget(values); future.thenAccept(new Consumer<List<KeyValue<String, String>>>() { @Override public void accept(List<KeyValue<String, String>> keyValues) { if (keyValues.size() != 0) { for (int i = 0; i < keyValues.size(); i++) { String[] splitKeys = StringUtils.split(keyValues.get(i).getKey(), ":"); keyValue.put(splitKeys[1], splitKeys[2]); keyValue.put(splitKeys[3], keyValues.get(i).getValue()); } try { Row row = fillData(input.row(), keyValue); dealCacheData(key, CacheObj.buildCacheObj(ECacheContentType.MultiLine, keyValue)); resultFuture.complete(Collections.singleton(new CRow(row, input.change()))); } catch (Exception e) { dealFillDataError(resultFuture, e, input); } } else { dealMissKey(input, resultFuture); dealCacheData(key, CacheMissVal.getMissKeyObj()); ======= public void asyncInvoke(CRow input, ResultFuture<CRow> resultFuture) throws Exception { CRow inputCopy = new CRow(input.row(), input.change()); Map<String, Object> refData = Maps.newHashMap(); for (int i = 0; i < sideInfo.getEqualValIndex().size(); i++) { Integer conValIndex = sideInfo.getEqualValIndex().get(i); Object equalObj = input.row().getField(conValIndex); if(equalObj == null){ dealMissKey(inputCopy, resultFuture); return; } refData.put(sideInfo.getEqualFieldList().get(i), equalObj); } String key = buildCacheKey(refData); if(StringUtils.isBlank(key)){ return; } if(openCache()){ CacheObj val = getFromCache(key); if(val != null){ if(ECacheContentType.MissVal == val.getType()){ dealMissKey(inputCopy, resultFuture); return; }else if(ECacheContentType.MultiLine == val.getType()){ try { Row row = fillData(input.row(), val.getContent()); resultFuture.complete(Collections.singleton(new CRow(row, inputCopy.change()))); } catch (Exception e) { dealFillDataError(resultFuture, e, inputCopy); } }else{ RuntimeException exception = new RuntimeException("not support cache obj type " + val.getType()); resultFuture.completeExceptionally(exception); } return; } } RedisFuture<Map<String, String>> future = ((RedisHashAsyncCommands) async).hgetall(key); future.thenAccept(new Consumer<Map<String, String>>() { @Override public void accept(Map<String, String> values) { if (MapUtils.isNotEmpty(values)) { try { Row row = fillData(input.row(), values); dealCacheData(key,CacheObj.buildCacheObj(ECacheContentType.MultiLine, values)); resultFuture.complete(Collections.singleton(new CRow(row, inputCopy.change()))); } catch (Exception e) { dealFillDataError(resultFuture, e, inputCopy); >>>>>>> public void handleAsyncInvoke(Map<String, Object> inputParams, CRow input, ResultFuture<CRow> resultFuture) throws Exception { String key = buildCacheKey(inputParams); if(StringUtils.isBlank(key)){ return; } RedisFuture<Map<String, String>> future = ((RedisHashAsyncCommands) async).hgetall(key); future.thenAccept(new Consumer<Map<String, String>>() { @Override public void accept(Map<String, String> values) { if (MapUtils.isNotEmpty(values)) { try { Row row = fillData(input.row(), values); dealCacheData(key,CacheObj.buildCacheObj(ECacheContentType.MultiLine, values)); 
resultFuture.complete(Collections.singleton(new CRow(row, input.change()))); } catch (Exception e) { dealFillDataError(resultFuture, e, input); <<<<<<< @Override public String buildCacheKey(Map<String, Object> inputParams) { String kv = StringUtils.join(inputParams.values(), ":"); String tableName = redisSideTableInfo.getTableName(); StringBuilder preKey = new StringBuilder(); preKey.append(tableName).append(":").append(kv); return preKey.toString(); ======= private String buildCacheKey(Map<String, Object> refData) { StringBuilder keyBuilder = new StringBuilder(redisSideTableInfo.getTableName()); List<String> primaryKeys = redisSideTableInfo.getPrimaryKeys(); for(String primaryKey : primaryKeys){ if(!refData.containsKey(primaryKey)){ return null; } keyBuilder.append("_").append(refData.get(primaryKey)); } return keyBuilder.toString(); >>>>>>> @Override public String buildCacheKey(Map<String, Object> refData) { StringBuilder keyBuilder = new StringBuilder(redisSideTableInfo.getTableName()); List<String> primaryKeys = redisSideTableInfo.getPrimaryKeys(); for(String primaryKey : primaryKeys){ if(!refData.containsKey(primaryKey)){ return null; } keyBuilder.append("_").append(refData.get(primaryKey)); } return keyBuilder.toString();
<<<<<<<
streamEnv.setRestartStrategy(RestartStrategies.failureRateRestart(
        ConfigConstrant.FAILUEE_RATE,
        Time.of(ConfigConstrant.FAILUEE_INTERVAL, TimeUnit.MINUTES),
        Time.of(ConfigConstrant.DELAY_INTERVAL, TimeUnit.SECONDS)
));
=======
if(isRestore(confProperties).get()){
    streamEnv.setRestartStrategy(RestartStrategies.failureRateRestart(
            ConfigConstrant.failureRate,
            Time.of(getFailureInterval(confProperties).get(), TimeUnit.MINUTES),
            Time.of(getDelayInterval(confProperties).get(), TimeUnit.SECONDS)
    ));
} else {
    streamEnv.setRestartStrategy(RestartStrategies.noRestart());
}
>>>>>>>
if(isRestore(confProperties).get()){
    streamEnv.setRestartStrategy(RestartStrategies.failureRateRestart(
            ConfigConstrant.FAILUEE_RATE,
            Time.of(getFailureInterval(confProperties).get(), TimeUnit.MINUTES),
            Time.of(getDelayInterval(confProperties).get(), TimeUnit.SECONDS)
    ));
} else {
    streamEnv.setRestartStrategy(RestartStrategies.noRestart());
}
<<<<<<< import com.dtstack.flink.sql.constrant.ConfigConstrant; ======= import com.dtstack.flink.sql.classloader.ClassLoaderManager; >>>>>>> import com.dtstack.flink.sql.classloader.ClassLoaderManager; import com.dtstack.flink.sql.constrant.ConfigConstrant; <<<<<<< registerTableUDF(classPath, funcName, tableEnv, classLoader); }else if("AGGREGATE".equalsIgnoreCase(type)){ registerAggregateUDF(classPath, funcName, tableEnv, classLoader); ======= registerTableUDF(classPath, funcName, tableEnv, jarURList); >>>>>>> registerTableUDF(classPath, funcName, tableEnv, jarURList); }else if("AGGREGATE".equalsIgnoreCase(type)){ registerAggregateUDF(classPath, funcName, tableEnv, jarURList); <<<<<<< TableFunction udfFunc = Class.forName(classPath, false, classLoader) .asSubclass(TableFunction.class).newInstance(); if (tableEnv instanceof StreamTableEnvironment) { ((StreamTableEnvironment) tableEnv).registerFunction(funcName, udfFunc); } else if (tableEnv instanceof BatchTableEnvironment) { ((BatchTableEnvironment) tableEnv).registerFunction(funcName, udfFunc); } else { ======= TableFunction udfFunc = ClassLoaderManager.newInstance(jarURList, (cl) -> cl.loadClass(classPath).asSubclass(TableFunction.class).newInstance()); if(tableEnv instanceof StreamTableEnvironment){ ((StreamTableEnvironment)tableEnv).registerFunction(funcName, udfFunc); }else if(tableEnv instanceof BatchTableEnvironment){ ((BatchTableEnvironment)tableEnv).registerFunction(funcName, udfFunc); }else{ >>>>>>> TableFunction udfFunc = ClassLoaderManager.newInstance(jarURList, (cl) -> cl.loadClass(classPath).asSubclass(TableFunction.class).newInstance()); if(tableEnv instanceof StreamTableEnvironment){ ((StreamTableEnvironment)tableEnv).registerFunction(funcName, udfFunc); }else if(tableEnv instanceof BatchTableEnvironment){ ((BatchTableEnvironment)tableEnv).registerFunction(funcName, udfFunc); }else{
<<<<<<< public void asyncInvoke(Tuple2<Boolean,Row> input, ResultFuture<Tuple2<Boolean,Row>> resultFuture) throws Exception { Tuple2<Boolean, Row> inputCopy = Tuple2.of(input.f0, input.f1); //scannerBuilder 设置为null重新加载过滤条件 ======= public void handleAsyncInvoke(Map<String, Object> inputParams, CRow input, ResultFuture<CRow> resultFuture) throws Exception { CRow inputCopy = new CRow(input.row(), input.change()); //scannerBuilder 设置为null重新加载过滤条件,然后connkudu重新赋值 //todo:代码需要优化 >>>>>>> public void handleAsyncInvoke(Map<String, Object> inputParams, Tuple2<Boolean,Row> input, ResultFuture<Tuple2<Boolean,Row>> resultFuture) throws Exception { Tuple2<Boolean,Row> inputCopy = Tuple2.of(input.f0, Row.copy(input.f1)); //scannerBuilder 设置为null重新加载过滤条件,然后connkudu重新赋值 //todo:代码需要优化 <<<<<<< for (int i = 0; i < sideInfo.getEqualValIndex().size(); i++) { Object equalObj = inputCopy.f1.getField(sideInfo.getEqualValIndex().get(i)); if (equalObj == null) { dealMissKey(inputCopy, resultFuture); return; } //增加过滤条件 scannerBuilder.addPredicate(KuduPredicate.newInListPredicate(schema.getColumn(sideInfo.getEqualFieldList().get(i)), Collections.singletonList(equalObj))); inputParams.add(equalObj); } ======= inputParams.entrySet().forEach(e ->{ scannerBuilder.addPredicate(KuduPredicate.newInListPredicate(schema.getColumn(e.getKey()), Collections.singletonList(e.getValue()))); }); >>>>>>> inputParams.entrySet().forEach(e ->{ scannerBuilder.addPredicate(KuduPredicate.newInListPredicate(schema.getColumn(e.getKey()), Collections.singletonList(e.getValue()))); }); <<<<<<< String key = buildCacheKey(inputParams); if (openCache()) { //判断数据是否已经加载到缓存中 CacheObj val = getFromCache(key); if (val != null) { if (ECacheContentType.MissVal == val.getType()) { dealMissKey(inputCopy, resultFuture); return; } else if (ECacheContentType.SingleLine == val.getType()) { try { Row row = fillData(inputCopy.f1, val); resultFuture.complete(Collections.singleton(Tuple2.of(inputCopy.f0,row))); } catch (Exception e) { dealFillDataError(resultFuture, e, inputCopy); } } else if (ECacheContentType.MultiLine == val.getType()) { try { List<Tuple2<Boolean,Row>> rowList = Lists.newArrayList(); for (Object jsonArray : (List) val.getContent()) { Row row = fillData(inputCopy.f1, jsonArray); rowList.add(Tuple2.of(inputCopy.f0, row)); } resultFuture.complete(rowList); } catch (Exception e) { dealFillDataError(resultFuture, e, inputCopy); } } else { resultFuture.completeExceptionally(new RuntimeException("not support cache obj type " + val.getType())); } return; } } ======= >>>>>>>
<<<<<<< import org.apache.flink.configuration.Configuration; ======= import org.apache.flink.metrics.Counter; >>>>>>> import org.apache.flink.configuration.Configuration; import org.apache.flink.metrics.Counter; <<<<<<< private void outByJoinType(ResultFuture<CRow> resultFuture, Throwable e){ if(sideInfo.getJoinType() == JoinType.LEFT){ resultFuture.complete(null); return; } resultFuture.completeExceptionally(e); } private Map<String, Object> formatInputParam(Map<String, Object> inputParam){ Map<String, Object> result = Maps.newHashMap(); inputParam.forEach((k,v) -> { result.put(k, convertDataType(v)); }); return result; } private Object convertDataType(Object val) { if (val == null) { // OK } else if (val instanceof Number && !(val instanceof BigDecimal)) { // OK } else if (val instanceof Boolean) { // OK } else if (val instanceof String) { // OK } else if (val instanceof Character) { // OK } else if (val instanceof CharSequence) { } else if (val instanceof JsonObject) { } else if (val instanceof JsonArray) { } else if (val instanceof Map) { } else if (val instanceof List) { } else if (val instanceof byte[]) { } else if (val instanceof Instant) { } else if (val instanceof Timestamp) { val = DateUtil.timestampToString((Timestamp) val); } else if (val instanceof java.util.Date) { val = DateUtil.dateToString((java.util.Date)val); } else { val = val.toString(); } return val; } ======= >>>>>>> private Map<String, Object> formatInputParam(Map<String, Object> inputParam){ Map<String, Object> result = Maps.newHashMap(); inputParam.forEach((k,v) -> { result.put(k, convertDataType(v)); }); return result; } private Object convertDataType(Object val) { if (val == null) { // OK } else if (val instanceof Number && !(val instanceof BigDecimal)) { // OK } else if (val instanceof Boolean) { // OK } else if (val instanceof String) { // OK } else if (val instanceof Character) { // OK } else if (val instanceof CharSequence) { } else if (val instanceof JsonObject) { } else if (val instanceof JsonArray) { } else if (val instanceof Map) { } else if (val instanceof List) { } else if (val instanceof byte[]) { } else if (val instanceof Instant) { } else if (val instanceof Timestamp) { val = DateUtil.timestampToString((Timestamp) val); } else if (val instanceof java.util.Date) { val = DateUtil.dateToString((java.util.Date)val); } else { val = val.toString(); } return val; }
<<<<<<<
this.headers = headers;
currentTermId = new AtomicLong(initialTermId);
=======
this.destination = destination;
this.channelId = channelId;
>>>>>>>
this.headers = headers;
this.destination = destination;
this.channelId = channelId;
<<<<<<< ======= // udf和tableEnv须由同一个类加载器加载 ClassLoader levelClassLoader = tableEnv.getClass().getClassLoader(); URLClassLoader classLoader = null; >>>>>>> <<<<<<< FlinkUtil.registerUDF(funcInfo.getType(), funcInfo.getClassName(), funcInfo.getName(), tableEnv, jarURList); ======= //classloader if (classLoader == null) { classLoader = FlinkUtil.loadExtraJar(jarURList, (URLClassLoader)levelClassLoader); } FlinkUtil.registerUDF(funcInfo.getType(), funcInfo.getClassName(), funcInfo.getName(), tableEnv, classLoader); >>>>>>> FlinkUtil.registerUDF(funcInfo.getType(), funcInfo.getClassName(), funcInfo.getName(), tableEnv, jarURList);
<<<<<<<
private AeronArchive.Context archiveContext;
private boolean ownsAeronClient = true;
=======
private boolean ownsAeronClient;
>>>>>>>
private AeronArchive.Context archiveContext;
private boolean ownsAeronClient;
<<<<<<< import org.apache.flink.streaming.api.operators.StreamingRuntimeContext; import org.apache.flink.streaming.api.operators.async.queue.StreamRecordQueueEntry; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService; ======= >>>>>>> import org.apache.flink.streaming.api.operators.StreamingRuntimeContext; import org.apache.flink.streaming.api.operators.async.queue.StreamRecordQueueEntry; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService; <<<<<<< import java.util.List; import java.util.Map; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; ======= >>>>>>> import java.util.List; import java.util.Map; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit;
<<<<<<< * 现在只支持ARRAY类型后续可以加入 MAP等类型 * * @param compositeTypeString * @return */ public static TypeInformation convertToCompositeType(String compositeTypeString) { Matcher matcher = matchCompositeType(compositeTypeString); final String errorMsg = "type " + compositeTypeString + "is not support!"; Preconditions.checkState(matcher.find(), errorMsg); String normalizedType = normalizeType(matcher.group(1)); Preconditions.checkState(ARRAY.equals(normalizedType), errorMsg); return convertToArray(compositeTypeString); } /** ======= >>>>>>> <<<<<<< /** * class 转成 TypeInformation * * @param fieldTypes * @return */ public static TypeInformation[] transformTypes(Class[] fieldTypes) { TypeInformation[] types = new TypeInformation[fieldTypes.length]; for (int i = 0; i < fieldTypes.length; i++) { types[i] = TypeInformation.of(fieldTypes[i]); } return types; } /** * class 转成 TypeInformation<Row> * * @param fieldTypes * @param fieldClasses * @return */ public static TypeInformation<Row> getRowTypeInformation(String[] fieldTypes, Class<?>[] fieldClasses) { TypeInformation[] types = IntStream.range(0, fieldClasses.length) .mapToObj(i -> { return TypeInformation.of(fieldClasses[i]); }) .toArray(TypeInformation[]::new); return new RowTypeInfo(types, fieldTypes); } ======= >>>>>>> /** * class 转成 TypeInformation * * @param fieldTypes * @return */ public static TypeInformation[] transformTypes(Class[] fieldTypes) { TypeInformation[] types = new TypeInformation[fieldTypes.length]; for (int i = 0; i < fieldTypes.length; i++) { types[i] = TypeInformation.of(fieldTypes[i]); } return types; } /** * class 转成 TypeInformation<Row> * * @param fieldTypes * @param fieldClasses * @return */ public static TypeInformation<Row> getRowTypeInformation(String[] fieldTypes, Class<?>[] fieldClasses) { TypeInformation[] types = IntStream.range(0, fieldClasses.length) .mapToObj(i -> { return TypeInformation.of(fieldClasses[i]); }) .toArray(TypeInformation[]::new); return new RowTypeInfo(types, fieldTypes); }
<<<<<<< import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl; import java.lang.reflect.Field; ======= import org.slf4j.Logger; import org.slf4j.LoggerFactory; >>>>>>> import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl; import java.lang.reflect.Field; import org.slf4j.Logger; import org.slf4j.LoggerFactory; <<<<<<< StreamTableEnvironment tableEnv = ExecuteProcessHelper.getStreamExecution(paramsInfo); StreamTableEnvironmentImpl tableEnvImpl = (StreamTableEnvironmentImpl) tableEnv; Field executionEnvironmentField = tableEnvImpl.getClass().getDeclaredField("executionEnvironment"); executionEnvironmentField.setAccessible(true); StreamExecutionEnvironment env = (StreamExecutionEnvironment) executionEnvironmentField.get(tableEnvImpl); ======= paramsInfo.setGetPlan(true); StreamExecutionEnvironment env = ExecuteProcessHelper.getStreamExecution(paramsInfo); >>>>>>> paramsInfo.setGetPlan(true); StreamTableEnvironment tableEnv = ExecuteProcessHelper.getStreamExecution(paramsInfo); StreamTableEnvironmentImpl tableEnvImpl = (StreamTableEnvironmentImpl) tableEnv; Field executionEnvironmentField = tableEnvImpl.getClass().getDeclaredField("executionEnvironment"); executionEnvironmentField.setAccessible(true); StreamExecutionEnvironment env = (StreamExecutionEnvironment) executionEnvironmentField.get(tableEnvImpl);
<<<<<<< import com.dtstack.flink.sql.classloader.DtClassLoader; import com.dtstack.flink.sql.constrant.ConfigConstrant; import com.dtstack.flink.sql.enums.ClusterMode; ======= import com.dtstack.flink.sql.classloader.ClassLoaderManager; >>>>>>> import com.dtstack.flink.sql.classloader.ClassLoaderManager; import com.dtstack.flink.sql.constrant.ConfigConstrant; import com.dtstack.flink.sql.enums.ClusterMode; <<<<<<< ======= if(env instanceof MyLocalStreamEnvironment) { ((MyLocalStreamEnvironment) env).setClasspaths(ClassLoaderManager.getClassPath()); } >>>>>>> <<<<<<< ======= //register urf >>>>>>> //register urf <<<<<<< FlinkUtil.registerUDF(funcInfo.getType(), funcInfo.getClassName(), funcInfo.getName(), tableEnv, classLoader); ======= FlinkUtil.registerUDF(funcInfo.getType(), funcInfo.getClassName(), funcInfo.getName(), tableEnv, jarURList); >>>>>>> FlinkUtil.registerUDF(funcInfo.getType(), funcInfo.getClassName(), funcInfo.getName(), tableEnv, jarURList);
<<<<<<< StreamTableEnvironment tableEnv = ExecuteProcessHelper.getStreamExecution(paramsInfo); StreamTableEnvironmentImpl tableEnvImpl = (StreamTableEnvironmentImpl) tableEnv; Field executionEnvironmentField = tableEnvImpl.getClass().getDeclaredField("executionEnvironment"); executionEnvironmentField.setAccessible(true); StreamExecutionEnvironment env = (StreamExecutionEnvironment) executionEnvironmentField.get(tableEnvImpl); ======= ClassLoader envClassLoader = StreamExecutionEnvironment.class.getClassLoader(); ClassLoader plannerClassLoader = URLClassLoader.newInstance(new URL[0], envClassLoader); Thread.currentThread().setContextClassLoader(plannerClassLoader); StreamExecutionEnvironment env = ExecuteProcessHelper.getStreamExecution(paramsInfo); >>>>>>> ClassLoader envClassLoader = StreamExecutionEnvironment.class.getClassLoader(); ClassLoader plannerClassLoader = URLClassLoader.newInstance(new URL[0], envClassLoader); Thread.currentThread().setContextClassLoader(plannerClassLoader); StreamTableEnvironment tableEnv = ExecuteProcessHelper.getStreamExecution(paramsInfo); StreamTableEnvironmentImpl tableEnvImpl = (StreamTableEnvironmentImpl) tableEnv; Field executionEnvironmentField = tableEnvImpl.getClass().getDeclaredField("executionEnvironment"); executionEnvironmentField.setAccessible(true); StreamExecutionEnvironment env = (StreamExecutionEnvironment) executionEnvironmentField.get(tableEnvImpl);
<<<<<<< public CarpetScriptHost globalHost; public Map<String, CarpetScriptHost> modules; ======= public final MinecraftServer server; public final CarpetScriptHost globalHost; public final Map<String, CarpetScriptHost> modules; >>>>>>> public final MinecraftServer server; public CarpetScriptHost globalHost; public Map<String, CarpetScriptHost> modules; <<<<<<< init(); } private void init() { ======= this.server = server; >>>>>>> init(); } private void init() { this.server = server; <<<<<<< File folder = CarpetServer.minecraft_server.getSavePath(WorldSavePath.ROOT).resolve("scripts").toFile(); ======= File folder = server.getLevelStorage().resolveFile( server.getLevelName(), "scripts"); >>>>>>> File folder = CarpetServer.minecraft_server.getSavePath(WorldSavePath.ROOT).resolve("scripts").toFile(); File folder = server.getLevelStorage().resolveFile( server.getLevelName(), "scripts"); <<<<<<< File folder = CarpetServer.minecraft_server.getSavePath(WorldSavePath.ROOT).resolve("scripts").toFile(); ======= File folder = server.getLevelStorage().resolveFile( server.getLevelName(), "scripts"); >>>>>>> File folder = CarpetServer.minecraft_server.getSavePath(WorldSavePath.ROOT).resolve("scripts").toFile(); File folder = server.getLevelStorage().resolveFile( server.getLevelName(), "scripts");
<<<<<<< import net.minecraft.util.registry.RegistryKey; import net.minecraft.world.World; ======= import org.apache.commons.lang3.tuple.Pair; >>>>>>> import net.minecraft.util.registry.RegistryKey; import net.minecraft.world.World; import org.apache.commons.lang3.tuple.Pair; <<<<<<< protected RegistryKey<World> entityDimension; ======= protected String snapTo; protected boolean snapX, snapY, snapZ; public DimensionType shapeDimension; >>>>>>> protected String snapTo; protected boolean snapX, snapY, snapZ; protected RegistryKey<World> shapeDimension; <<<<<<< entityDimension = RegistryKey.of(Registry.DIMENSION, new Identifier(options.get("dim").getString())); ======= snapTo = options.getOrDefault("snap", optional.get("snap")).getString().toLowerCase(Locale.ROOT); snapX = snapTo.contains("x"); snapY = snapTo.contains("y"); snapZ = snapTo.contains("z"); >>>>>>> snapTo = options.getOrDefault("snap", optional.get("snap")).getString().toLowerCase(Locale.ROOT); snapX = snapTo.contains("x"); snapY = snapTo.contains("y"); snapZ = snapTo.contains("z");
<<<<<<< import org.apache.flink.client.program.PackagedProgramUtils; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.GlobalConfiguration; import org.apache.flink.runtime.jobgraph.JobGraph; ======= import org.apache.flink.client.program.PackagedProgramUtils; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.jobgraph.JobGraph; >>>>>>> import org.apache.flink.client.program.PackagedProgramUtils; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.GlobalConfiguration; import org.apache.flink.runtime.jobgraph.JobGraph; <<<<<<< ======= String pluginRoot = launcherOptions.getLocalSqlPluginPath(); File jarFile = new File(getLocalCoreJarPath(pluginRoot)); String[] remoteArgs = argList.toArray(new String[argList.size()]); PackagedProgram program = new PackagedProgram(jarFile, Lists.newArrayList(), remoteArgs); if(StringUtils.isNotBlank(launcherOptions.getSavePointPath())){ program.setSavepointRestoreSettings(SavepointRestoreSettings.forPath(launcherOptions.getSavePointPath(), BooleanUtils.toBoolean(launcherOptions.getAllowNonRestoredState()))); } //final JobGraph jobGraph; //jobGraph = PackagedProgramUtils.createJobGraph(program, new Configuration(), 1); //clusterClient.runDetached(jobGraph,null); >>>>>>> pluginRoot = launcherOptions.getLocalSqlPluginPath(); jarFile = new File(getLocalCoreJarPath(pluginRoot)); remoteArgs = argList.toArray(new String[argList.size()]); program = new PackagedProgram(jarFile, Lists.newArrayList(), remoteArgs); if(StringUtils.isNotBlank(launcherOptions.getSavePointPath())){ program.setSavepointRestoreSettings(SavepointRestoreSettings.forPath(launcherOptions.getSavePointPath(), BooleanUtils.toBoolean(launcherOptions.getAllowNonRestoredState()))); } //final JobGraph jobGraph; //jobGraph = PackagedProgramUtils.createJobGraph(program, new Configuration(), 1); //clusterClient.runDetached(jobGraph,null);
<<<<<<<
import net.minecraft.util.TickDurationMonitor;
=======
import net.minecraft.server.world.ServerWorld;
>>>>>>>
import net.minecraft.util.TickDurationMonitor;
import net.minecraft.server.world.ServerWorld;
<<<<<<<
@Shadow protected abstract void startMonitor(TickDurationMonitor monitor);
@Shadow private long lastTimeReference;
@Shadow private boolean waitingForNextTick;
=======
@Shadow public abstract Iterable<ServerWorld> getWorlds();
>>>>>>>
@Shadow protected abstract void startMonitor(TickDurationMonitor monitor);
@Shadow private long lastTimeReference;
@Shadow private boolean waitingForNextTick;
@Shadow public abstract Iterable<ServerWorld> getWorlds();
<<<<<<<
this.waitingForNextTick = true;
this.field_19248 = Math.max(Util.getMeasuringTimeMs() + /*50L*/ msThisTick, this.timeReference);
if (TickSpeed.time_warp_start_time != 0)
=======
if (TickSpeed.time_warp_start_time != 0) // clearing all hanging tasks no matter what when warping
>>>>>>>
if (TickSpeed.time_warp_start_time != 0) // clearing all hanging tasks no matter what when warping
<<<<<<<
public void flatMap(Row input, Collector<BaseRow> out) throws Exception {
    Map<String, String> inputParams = Maps.newHashMap();
    for(Integer conValIndex : sideInfo.getEqualValIndex()){
        Object equalObj = input.getField(conValIndex);
=======
public void flatMap(CRow input, Collector<CRow> out) throws Exception {
    Map<String, Object> inputParams = Maps.newHashMap();
    for (int i = 0; i < sideInfo.getEqualValIndex().size(); i++) {
        Integer conValIndex = sideInfo.getEqualValIndex().get(i);
        Object equalObj = input.row().getField(conValIndex);
>>>>>>>
public void flatMap(Row input, Collector<BaseRow> out) throws Exception {
    Map<String, Object> inputParams = Maps.newHashMap();
    for(Integer conValIndex : sideInfo.getEqualValIndex()){
        Object equalObj = input.getField(conValIndex);
<<<<<<<
@OptionRequired(description = "sql planner")
private String planner = PlannerType.FLINK.name();
=======
@OptionRequired(description = "dirty plugin properties")
private String dirtyProperties;
>>>>>>>
@OptionRequired(description = "dirty plugin properties")
private String dirtyProperties;

@OptionRequired(description = "sql planner")
private String planner = PlannerType.FLINK.name();
<<<<<<<
public void setPlanner(String planner) {
    this.planner = planner;
}

public String getPlanner() {
    return planner;
}
}
=======
public String getDirtyProperties() {
    return dirtyProperties;
}

public void setDirtyProperties(String dirtyProperties) {
    this.dirtyProperties = dirtyProperties;
}
}
>>>>>>>
public void setPlanner(String planner) {
    this.planner = planner;
}

public String getPlanner() {
    return planner;
}

public String getDirtyProperties() {
    return dirtyProperties;
}

public void setDirtyProperties(String dirtyProperties) {
    this.dirtyProperties = dirtyProperties;
}
}
<<<<<<< private SideSQLParser sideSqlParser = new SideSQLParser(); ======= >>>>>>> <<<<<<< public void exec(String sql, Map<String, AbstractSideTableInfo> sideTableMap, StreamTableEnvironment tableEnv, Map<String, Table> tableCache, StreamQueryConfig queryConfig) throws Exception { ======= public void exec(String sql, Map<String, SideTableInfo> sideTableMap, StreamTableEnvironment tableEnv, Map<String, Table> tableCache, StreamQueryConfig queryConfig, CreateTmpTableParser.SqlParserResult createView) throws Exception { >>>>>>> public void exec(String sql, Map<String, SideTableInfo> sideTableMap, StreamTableEnvironment tableEnv, Map<String, Table> tableCache, StreamQueryConfig queryConfig, CreateTmpTableParser.SqlParserResult createView) throws Exception { <<<<<<< sideSqlParser.setLocalTableCache(localTableCache); Queue<Object> exeQueue = sideSqlParser.getExeQueue(sql, sideTableMap.keySet()); ======= if(createView != null){ LOG.warn("create view info\n"); LOG.warn(createView.getExecSql()); LOG.warn("-----------------"); } SideSQLParser sideSQLParser = new SideSQLParser(); sideSQLParser.setLocalTableCache(localTableCache); Queue<Object> exeQueue = sideSQLParser.getExeQueue(sql, sideTableMap.keySet()); >>>>>>> if(createView != null){ LOG.warn("create view info\n"); LOG.warn(createView.getExecSql()); LOG.warn("-----------------"); } SideSQLParser sideSQLParser = new SideSQLParser(); sideSQLParser.setLocalTableCache(localTableCache); Queue<Object> exeQueue = sideSQLParser.getExeQueue(sql, sideTableMap.keySet()); <<<<<<< AliasInfo aliasInfo = parseAsNode(pollSqlNode); Table table = tableEnv.sqlQuery(aliasInfo.getName()); tableEnv.registerTable(aliasInfo.getAlias(), table); localTableCache.put(aliasInfo.getAlias(), table); FieldReplaceInfo fieldReplaceInfo = parseAsQuery((SqlBasicCall) pollSqlNode, tableCache); if(fieldReplaceInfo != null){ replaceInfoList.add(fieldReplaceInfo); } ======= dealAsSourceTable(tableEnv, pollSqlNode, tableCache, replaceInfoList); >>>>>>> dealAsSourceTable(tableEnv, pollSqlNode, tableCache, replaceInfoList); <<<<<<< tableEnv.registerTable(tableAlias, table); ======= tableEnv.registerTable(TableAlias, table); } else if (pollSqlNode.getKind() == SELECT){ Preconditions.checkState(createView != null, "select sql must included by create view"); Table table = tableEnv.sqlQuery(pollObj.toString()); if (createView.getFieldsInfoStr() == null){ tableEnv.registerTable(createView.getTableName(), table); } else { if (checkFieldsInfo(createView, table)){ table = table.as(tmpFields); tableEnv.registerTable(createView.getTableName(), table); } else { throw new RuntimeException("Fields mismatch"); } } localTableCache.put(createView.getTableName(), table); >>>>>>> tableEnv.registerTable(TableAlias, table); } else if (pollSqlNode.getKind() == SELECT){ Preconditions.checkState(createView != null, "select sql must included by create view"); Table table = tableEnv.sqlQuery(pollObj.toString()); if (createView.getFieldsInfoStr() == null){ tableEnv.registerTable(createView.getTableName(), table); } else { if (checkFieldsInfo(createView, table)){ table = table.as(tmpFields); tableEnv.registerTable(createView.getTableName(), table); } else { throw new RuntimeException("Fields mismatch"); } } localTableCache.put(createView.getTableName(), table);
<<<<<<< DtClassLoader dtClassLoader = (DtClassLoader) classLoader; PluginUtil.addPluginJar(pluginJarPath, dtClassLoader); String className = PluginUtil.getSqlSideClassName(sideType, "side", OPERATOR_TYPE); return dtClassLoader.loadClass(className).asSubclass(AsyncReqRow.class) .getConstructor(RowTypeInfo.class, JoinInfo.class, List.class, SideTableInfo.class).newInstance(rowTypeInfo, joinInfo, outFieldInfoList, sideTableInfo); ======= String className = PluginUtil.getSqlSideClassName(sideType, "side", "Async"); return ClassLoaderManager.newInstance(pluginJarPath, (cl) -> cl.loadClass(className).asSubclass(AsyncReqRow.class) .getConstructor(RowTypeInfo.class, JoinInfo.class, List.class, SideTableInfo.class) .newInstance(rowTypeInfo, joinInfo, outFieldInfoList, sideTableInfo)); >>>>>>> String className = PluginUtil.getSqlSideClassName(sideType, "side", OPERATOR_TYPE); return ClassLoaderManager.newInstance(pluginJarPath, (cl) -> cl.loadClass(className).asSubclass(AsyncReqRow.class) .getConstructor(RowTypeInfo.class, JoinInfo.class, List.class, SideTableInfo.class) .newInstance(rowTypeInfo, joinInfo, outFieldInfoList, sideTableInfo));
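The resolution above settles on ClassLoaderManager.newInstance, which resolves the side-table class through a per-jar classloader and a callback instead of mutating a shared DtClassLoader. A minimal sketch of that cache-a-classloader-per-jar idea; the names PluginClassLoaders, forJar, and com.example.DemoSide are hypothetical, not part of the project:

    import java.io.File;
    import java.net.URL;
    import java.net.URLClassLoader;
    import java.util.concurrent.ConcurrentHashMap;

    public final class PluginClassLoaders {

        // One classloader per plugin jar, created lazily and reused, so repeated
        // side-table lookups do not leak URLClassLoader instances.
        private static final ConcurrentHashMap<String, URLClassLoader> CACHE = new ConcurrentHashMap<>();

        public static URLClassLoader forJar(String jarPath) {
            return CACHE.computeIfAbsent(jarPath, path -> {
                try {
                    URL url = new File(path).toURI().toURL();
                    return new URLClassLoader(new URL[]{url}, PluginClassLoaders.class.getClassLoader());
                } catch (Exception e) {
                    throw new RuntimeException("cannot open plugin jar " + path, e);
                }
            });
        }

        public static void main(String[] args) throws Exception {
            // Analogous to the lambda in the resolution: load, then reflectively construct.
            Object impl = forJar("/tmp/side-plugin.jar")
                    .loadClass("com.example.DemoSide")   // hypothetical plugin class
                    .getConstructor(String.class)
                    .newInstance("demo");
            System.out.println(impl);
        }
    }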
<<<<<<< public static List<BaseText> printEntitiesByType(SpawnGroup cat, World worldIn) //Class<?> entityType) ======= public static List<BaseText> printEntitiesByType(EntityCategory cat, World worldIn, boolean all) //Class<?> entityType) >>>>>>> public static List<BaseText> printEntitiesByType(SpawnGroup cat, World worldIn, boolean all) //Class<?> entityType)
<<<<<<< import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.EnvironmentSettings; import org.apache.flink.table.api.Table; import org.apache.flink.table.api.TableEnvironment; import org.apache.flink.table.api.java.StreamTableEnvironment; import org.apache.flink.table.sinks.TableSink; import org.apache.flink.types.Row; ======= >>>>>>> <<<<<<< StreamTableEnvironment tableEnv = getStreamTableEnv(env,paramsInfo.getConfProp()); ======= StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env); StreamQueryConfig streamQueryConfig = StreamEnvConfigManager.getStreamQueryConfig(tableEnv, paramsInfo.getConfProp()); // init global flinkPlanner FlinkPlanner.createFlinkPlanner(tableEnv.getFrameworkConfig(), tableEnv.getPlanner(), tableEnv.getTypeFactory()); >>>>>>> StreamTableEnvironment tableEnv = getStreamTableEnv(env, paramsInfo.getConfProp()); StreamQueryConfig streamQueryConfig = StreamEnvConfigManager.getStreamQueryConfig(tableEnv, paramsInfo.getConfProp()); <<<<<<< sideSqlExec.exec(result.getExecSql(), sideTableMap, tableEnv, registerTableCache, result); ======= sideSqlExec.exec(result.getExecSql(), sideTableMap, tableEnv, registerTableCache, queryConfig, result, scope + ""); scope++; >>>>>>> sideSqlExec.exec(result.getExecSql(), sideTableMap, tableEnv, registerTableCache, queryConfig, result, scope + ""); scope++; <<<<<<< sideSqlExec.exec(tmp.getExecSql(), sideTableMap, tableEnv, registerTableCache, tmp); ======= sideSqlExec.exec(tmp.getExecSql(), sideTableMap, tableEnv, registerTableCache, queryConfig, tmp, scope + ""); >>>>>>> sideSqlExec.exec(tmp.getExecSql(), sideTableMap, tableEnv, registerTableCache, queryConfig, tmp, scope + ""); <<<<<<< sideSqlExec.exec(result.getExecSql(), sideTableMap, tableEnv, registerTableCache, null); ======= sideSqlExec.exec(result.getExecSql(), sideTableMap, tableEnv, registerTableCache, queryConfig, null, null); >>>>>>> sideSqlExec.exec(result.getExecSql(), sideTableMap, tableEnv, registerTableCache, queryConfig, null, null); <<<<<<< private static StreamTableEnvironment getStreamTableEnv(StreamExecutionEnvironment env, Properties confProperties) { // use blink and streammode EnvironmentSettings settings = EnvironmentSettings.newInstance() .useBlinkPlanner() .inStreamingMode() .build(); StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings); StreamEnvConfigManager.streamTableEnvironmentStateTTLConfig(tableEnv, confProperties); return tableEnv; } public static void setLogLevel(ParamsInfo paramsInfo){ String logLevel = paramsInfo.getConfProp().getProperty(ConfigConstrant.LOG_LEVEL_KEY); if(org.apache.commons.lang3.StringUtils.isBlank(logLevel)){ return; } ChangeLogLevelProcess logLevelProcess = new ChangeLogLevelProcess(); logLevelProcess.process(logLevel); } ======= >>>>>>> private static StreamTableEnvironment getStreamTableEnv(StreamExecutionEnvironment env, Properties confProperties) { // use blink and streammode EnvironmentSettings settings = EnvironmentSettings.newInstance() .useBlinkPlanner() .inStreamingMode() .build(); StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings); StreamEnvConfigManager.streamTableEnvironmentStateTTLConfig(tableEnv, confProperties); return tableEnv; }
<<<<<<< import net.minecraft.util.math.BlockPos; ======= import net.minecraft.util.math.Vec3d; >>>>>>> import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.Vec3d;
<<<<<<< import com.dtstack.flink.sql.classloader.DtClassLoader; ======= import com.dtstack.flink.sql.dirtyManager.manager.DirtyDataManager; >>>>>>> import com.dtstack.flink.sql.classloader.DtClassLoader; import com.dtstack.flink.sql.dirtyManager.manager.DirtyDataManager; <<<<<<< String planner = options.getPlanner(); ======= String dirtyStr = options.getDirtyProperties(); >>>>>>> String planner = options.getPlanner(); String dirtyStr = options.getDirtyProperties(); <<<<<<< Set<URL> classPathSets = ExecuteProcessHelper.registerTable( sqlTree , env , tableEnv , paramsInfo.getLocalSqlPluginPath() , paramsInfo.getRemoteSqlPluginPath() , paramsInfo.getPluginLoadMode() , sideTableMap , registerTableCache , planner); ======= Set<URL> classPathSets = ExecuteProcessHelper.registerTable( sqlTree , env , tableEnv , paramsInfo.getLocalSqlPluginPath() , paramsInfo.getRemoteSqlPluginPath() , paramsInfo.getPluginLoadMode() , paramsInfo.getDirtyProperties() , sideTableMap , registerTableCache); >>>>>>> Set<URL> classPathSets = ExecuteProcessHelper.registerTable( sqlTree , env , tableEnv , paramsInfo.getLocalSqlPluginPath() , paramsInfo.getRemoteSqlPluginPath() , paramsInfo.getPluginLoadMode() , paramsInfo.getDirtyProperties() , sideTableMap , registerTableCache , planner); <<<<<<< ExecuteProcessHelper.sqlTranslation( paramsInfo.getLocalSqlPluginPath() , paramsInfo.getPluginLoadMode() , tableEnv , sqlTree , sideTableMap , registerTableCache , planner); ======= ExecuteProcessHelper.sqlTranslation(paramsInfo.getLocalSqlPluginPath(), paramsInfo.getPluginLoadMode(), tableEnv, sqlTree, sideTableMap, registerTableCache); >>>>>>> ExecuteProcessHelper.sqlTranslation( paramsInfo.getLocalSqlPluginPath() , paramsInfo.getPluginLoadMode() , tableEnv , sqlTree , sideTableMap , registerTableCache , planner); <<<<<<< SqlTree sqlTree, Map<String, AbstractSideTableInfo> sideTableMap, Map<String, Table> registerTableCache, String planner) throws Exception { ======= SqlTree sqlTree, Map<String, AbstractSideTableInfo> sideTableMap, Map<String, Table> registerTableCache) throws Exception { SideSqlExec sideSqlExec = new SideSqlExec(); sideSqlExec.setLocalSqlPluginPath(localSqlPluginPath); sideSqlExec.setPluginLoadMode(pluginLoadMode); int scope = 0; for (CreateTmpTableParser.SqlParserResult result : sqlTree.getTmpSqlList()) { sideSqlExec.exec(result.getExecSql(), sideTableMap, tableEnv, registerTableCache, result, scope + ""); scope++; } >>>>>>> SqlTree sqlTree, Map<String, AbstractSideTableInfo> sideTableMap, Map<String, Table> registerTableCache, String planner) throws Exception { <<<<<<< public static Set<URL> registerTable( SqlTree sqlTree , StreamExecutionEnvironment env , StreamTableEnvironment tableEnv , String localSqlPluginPath , String remoteSqlPluginPath , String pluginLoadMode , Map<String, AbstractSideTableInfo> sideTableMap , Map<String, Table> registerTableCache , String planner) throws Exception { ======= public static Set<URL> registerTable( SqlTree sqlTree , StreamExecutionEnvironment env , StreamTableEnvironment tableEnv , String localSqlPluginPath , String remoteSqlPluginPath , String pluginLoadMode , Properties dirtyProperties , Map<String, AbstractSideTableInfo> sideTableMap , Map<String, Table> registerTableCache ) throws Exception { >>>>>>> public static Set<URL> registerTable( SqlTree sqlTree , StreamExecutionEnvironment env , StreamTableEnvironment tableEnv , String localSqlPluginPath , String remoteSqlPluginPath , String pluginLoadMode , Properties dirtyProperties , Map<String, AbstractSideTableInfo> sideTableMap , Map<String, Table> registerTableCache , String planner ) throws Exception {
<<<<<<< upload = dbConn.prepareStatement(insertQuery); initMetric(); ======= if (dbConn.getMetaData().getTables(null, null, tableName, null).next()){ upload = dbConn.prepareStatement(insertQuery); } else { throw new SQLException("Table " + tableName +" doesn't exist"); } >>>>>>> if (dbConn.getMetaData().getTables(null, null, tableName, null).next()){ upload = dbConn.prepareStatement(insertQuery); } else { throw new SQLException("Table " + tableName +" doesn't exist"); } initMetric();
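The existence check merged in above comes straight from JDBC's DatabaseMetaData; the resolution keeps the check before preparing the statement. A self-contained sketch of that check-then-prepare order, assuming an H2 driver on the classpath; the URL and table name are placeholders:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public final class TableCheckSketch {
        public static void main(String[] args) throws SQLException {
            try (Connection dbConn = DriverManager.getConnection(
                    "jdbc:h2:mem:demo;INIT=CREATE TABLE SINK_TABLE(ID INT)")) {
                // getTables returns one row per matching table; no row means no table.
                try (ResultSet tables = dbConn.getMetaData().getTables(null, null, "SINK_TABLE", null)) {
                    if (!tables.next()) {
                        throw new SQLException("Table SINK_TABLE doesn't exist");
                    }
                }
                // Prepare only after the table is known to exist.
                try (PreparedStatement upload = dbConn.prepareStatement(
                        "INSERT INTO SINK_TABLE (ID) VALUES (?)")) {
                    upload.setInt(1, 1);
                    upload.executeUpdate();
                }
            }
        }
    }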
<<<<<<< int fieldTypeLength = resolved.getRowTypeInfo().getFieldTypes().length; if(fieldTypeLength == 2 && resolved.getRowTypeInfo().getFieldTypes()[1].getClass().equals(RowTypeInfo.class)){ field = (RowTypeInfo) resolved.getRowTypeInfo().getFieldTypes()[1]; } else if(fieldTypeLength ==1 && resolved.getRowTypeInfo().getFieldTypes()[0].getClass().equals(RowTypeInfo.class)){ field = (RowTypeInfo) resolved.getRowTypeInfo().getFieldTypes()[0]; }else{ field = resolved.getRowTypeInfo(); } ======= field = resolved.getRowTypeInfo(); baseRowTypeInfo = resolved.getBaseRowTypeInfo(); >>>>>>> int fieldTypeLength = resolved.getRowTypeInfo().getFieldTypes().length; if(fieldTypeLength == 2 && resolved.getRowTypeInfo().getFieldTypes()[1].getClass().equals(RowTypeInfo.class)){ field = (RowTypeInfo) resolved.getRowTypeInfo().getFieldTypes()[1]; } else if(fieldTypeLength ==1 && resolved.getRowTypeInfo().getFieldTypes()[0].getClass().equals(RowTypeInfo.class)){ field = (RowTypeInfo) resolved.getRowTypeInfo().getFieldTypes()[0]; }else{ field = resolved.getRowTypeInfo(); } baseRowTypeInfo = resolved.getBaseRowTypeInfo();
<<<<<<< public void fillPredicatesForSideTable(String exeSql, Map<String, SideTableInfo> sideTableMap) throws SqlParseException { FlinkPlannerImpl flinkPlanner = FlinkPlanner.getFlinkPlanner(); SqlNode sqlNode = flinkPlanner.parse(exeSql); ======= public void fillPredicatesForSideTable(String exeSql, Map<String, AbstractSideTableInfo> sideTableMap) throws SqlParseException { SqlParser sqlParser = SqlParser.create(exeSql, CalciteConfig.MYSQL_LEX_CONFIG); SqlNode sqlNode = sqlParser.parseStmt(); >>>>>>> public void fillPredicatesForSideTable(String exeSql, Map<String, AbstractSideTableInfo> sideTableMap) throws SqlParseException { FlinkPlannerImpl flinkPlanner = FlinkPlanner.getFlinkPlanner(); SqlNode sqlNode = flinkPlanner.parse(exeSql);
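Both branches above parse the statement before filling side-table predicates; the incoming side goes through Calcite's SqlParser directly. A minimal Calcite-only sketch (not the project's FlinkPlanner path) of parse-then-branch-on-SqlKind, which is the shape these parsers share:

    import org.apache.calcite.sql.SqlNode;
    import org.apache.calcite.sql.SqlSelect;
    import org.apache.calcite.sql.parser.SqlParser;

    public final class ParseSketch {
        public static void main(String[] args) throws Exception {
            SqlNode node = SqlParser.create("SELECT id FROM src WHERE id > 1").parseStmt();
            switch (node.getKind()) {
                case SELECT:
                    // getFrom() yields the source table (a plain IDENTIFIER here); the
                    // side-table walkers recurse into it for JOIN/AS/UNION shapes.
                    System.out.println(((SqlSelect) node).getFrom()); // SRC
                    break;
                default:
                    break;
            }
        }
    }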
<<<<<<< import org.apache.flink.types.Row; import org.hbase.async.Scanner; import org.hbase.async.*; ======= import org.apache.flink.table.dataformat.BaseRow; import org.hbase.async.BinaryPrefixComparator; import org.hbase.async.Bytes; import org.hbase.async.CompareFilter; import org.hbase.async.HBaseClient; import org.hbase.async.KeyValue; import org.hbase.async.RowFilter; import org.hbase.async.ScanFilter; import org.hbase.async.Scanner; >>>>>>> import org.apache.flink.types.Row; import org.apache.flink.table.dataformat.BaseRow; import org.hbase.async.BinaryPrefixComparator; import org.hbase.async.Bytes; import org.hbase.async.CompareFilter; import org.hbase.async.HBaseClient; import org.hbase.async.KeyValue; import org.hbase.async.RowFilter; import org.hbase.async.ScanFilter; import org.hbase.async.Scanner; <<<<<<< public void asyncGetData(String tableName, String rowKeyStr, Row input, ResultFuture<Row> resultFuture, ======= public void asyncGetData(String tableName, String rowKeyStr, BaseRow input, ResultFuture<BaseRow> resultFuture, >>>>>>> public void asyncGetData(String tableName, String rowKeyStr, Row input, ResultFuture<Row> resultFuture, <<<<<<< private String dealOneRow(ArrayList<ArrayList<KeyValue>> args, String rowKeyStr, Row input, ResultFuture<Row> resultFuture, AbstractSideCache sideCache) { if (args == null || args.size() == 0) { ======= private String dealOneRow(ArrayList<ArrayList<KeyValue>> args, String rowKeyStr, BaseRow input, ResultFuture<BaseRow> resultFuture, AbstractSideCache sideCache) { if(args == null || args.size() == 0){ >>>>>>> private String dealOneRow(ArrayList<ArrayList<KeyValue>> args, String rowKeyStr, Row input, ResultFuture<Row> resultFuture, AbstractSideCache sideCache) { if (args == null || args.size() == 0) {
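The imports chosen in this resolution point at asynchbase's scan-filter API: a Scanner with a RowFilter over a BinaryPrefixComparator, whose batches arrive as ArrayList<ArrayList<KeyValue>>, matching the dealOneRow signature above. A stripped-down sketch of one such prefix scan, assuming that asynchbase API; the ZooKeeper quorum, table, and key are placeholders, and only a single batch is fetched:

    import com.stumbleupon.async.Callback;
    import java.util.ArrayList;
    import org.hbase.async.*;

    public final class PrefixScanSketch {
        public static void main(String[] args) {
            HBaseClient client = new HBaseClient("zk-host:2181");
            Scanner scanner = client.newScanner("side_table");
            // Keep only rows whose key starts with the lookup key.
            scanner.setFilter(new RowFilter(CompareFilter.CompareOp.EQUAL,
                    new BinaryPrefixComparator("key-1".getBytes())));
            scanner.nextRows().addCallback(
                    (Callback<Object, ArrayList<ArrayList<KeyValue>>>) rows -> {
                if (rows != null) {
                    for (ArrayList<KeyValue> row : rows) {
                        for (KeyValue kv : row) {
                            System.out.println(new String(kv.qualifier()) + "=" + new String(kv.value()));
                        }
                    }
                }
                scanner.close();
                return null;
            });
        }
    }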
<<<<<<< import com.dtstack.flink.sql.sink.rdb.format.JDBCUpsertOutputFormat; ======= import com.dtstack.flink.sql.sink.rdb.format.ExtendOutputFormat; import com.dtstack.flink.sql.sink.rdb.format.RetractJDBCOutputFormat; import com.dtstack.flink.sql.table.TableInfo; import com.dtstack.flink.sql.util.DtStringUtil; import org.apache.commons.lang3.StringUtils; import com.google.common.collect.Lists; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.stream.Collectors; >>>>>>> import com.dtstack.flink.sql.sink.rdb.format.JDBCUpsertOutputFormat; <<<<<<< ======= private static final String ORACLE_DRIVER = "oracle.jdbc.driver.OracleDriver"; private final String SQL_DEFAULT_PLACEHOLDER = " ? "; private final String DEAL_CHAR_KEY = "char"; private String RPAD_FORMAT = " rpad(?, %d, ' ') "; @Override public String getDriverName() { return ORACLE_DRIVER; } >>>>>>> private final String SQL_DEFAULT_PLACEHOLDER = " ? "; private final String DEAL_CHAR_KEY = "char"; private String RPAD_FORMAT = " rpad(?, %d, ' ') "; <<<<<<< ======= private void buildInsertSql(String scheam, String tableName, List<String> fields) { tableName = DtStringUtil.getTableFullPath(scheam,tableName); String sqlTmp = "insert into " + tableName + " (${fields}) values (${placeholder})"; List<String> adaptFields = Lists.newArrayList(); fields.forEach(field -> adaptFields.add(DtStringUtil.addQuoteForStr(field))); String fieldsStr = StringUtils.join(adaptFields, ","); String placeholder = ""; for (String fieldName : fields) { placeholder += ",?"; } placeholder = placeholder.replaceFirst(",", ""); sqlTmp = sqlTmp.replace("${fields}", fieldsStr).replace("${placeholder}", placeholder); this.sql = sqlTmp; } /** * use MERGE INTO build oracle replace into sql * @param tableName * @param fieldNames create table contained column columns * @param realIndexes <key: indexName, value: index contains columns > * @param fullField real columns , query from db * @return */ @Override public String buildUpdateSql(String scheam, String tableName, List<String> fieldNames, Map<String, List<String>> realIndexes, List<String> fullField) { tableName = DtStringUtil.getTableFullPath(scheam, tableName); StringBuilder sb = new StringBuilder(); sb.append("MERGE INTO " + tableName + " T1 USING " + "(" + makeValues(fieldNames) + ") T2 ON (" + updateKeySql(realIndexes) + ") "); String updateSql = getUpdateSql(fieldNames, fullField, "T1", "T2", keyColList(realIndexes)); if (StringUtils.isNotEmpty(updateSql)) { sb.append(" WHEN MATCHED THEN UPDATE SET "); sb.append(updateSql); } sb.append(" WHEN NOT MATCHED THEN " + "INSERT (" + quoteColumns(fieldNames) + ") VALUES (" + quoteColumns(fieldNames, "T2") + ")"); return sb.toString(); } public String quoteColumns(List<String> column) { return quoteColumns(column, null); } public String quoteColumns(List<String> column, String table) { String prefix = StringUtils.isBlank(table) ? "" : DtStringUtil.addQuoteForStr(table) + "."; List<String> list = new ArrayList<>(); for (String col : column) { list.add(prefix + DtStringUtil.addQuoteForStr(col)); } return StringUtils.join(list, ","); } /** * extract all distinct index column * @param realIndexes * @return */ protected List<String> keyColList(Map<String, List<String>> realIndexes) { List<String> keyCols = new ArrayList<>(); for (Map.Entry<String, List<String>> entry : realIndexes.entrySet()) { List<String> list = entry.getValue(); for (String col : list) { if (!containsIgnoreCase(keyCols,col)) { keyCols.add(col); } } } return keyCols; } /** * build update sql , such as UPDATE SET "T1".A="T2".A * @param updateColumn create table contained column columns * @param fullColumn real columns , query from db * @param leftTable alias * @param rightTable alias * @param indexCols index column * @return */ public String getUpdateSql(List<String> updateColumn, List<String> fullColumn, String leftTable, String rightTable, List<String> indexCols) { String prefixLeft = StringUtils.isBlank(leftTable) ? "" : DtStringUtil.addQuoteForStr(leftTable) + "."; String prefixRight = StringUtils.isBlank(rightTable) ? "" : DtStringUtil.addQuoteForStr(rightTable) + "."; List<String> list = new ArrayList<>(); for (String col : fullColumn) { // filter index column if (indexCols == null || indexCols.size() == 0 || containsIgnoreCase(indexCols,col)) { continue; } if (containsIgnoreCase(updateColumn,col)) { list.add(prefixLeft + DtStringUtil.addQuoteForStr(col) + "=" + prefixRight + DtStringUtil.addQuoteForStr(col)); } else { list.add(prefixLeft + DtStringUtil.addQuoteForStr(col) + "=null"); } } return StringUtils.join(list, ","); } /** * build connect sql by index column, such as T1."A"=T2."A" * @param updateKey * @return */ public String updateKeySql(Map<String, List<String>> updateKey) { List<String> exprList = new ArrayList<>(); for (Map.Entry<String, List<String>> entry : updateKey.entrySet()) { List<String> colList = new ArrayList<>(); for (String col : entry.getValue()) { colList.add("T1." + DtStringUtil.addQuoteForStr(col) + "=T2." + DtStringUtil.addQuoteForStr(col)); } exprList.add(StringUtils.join(colList, " AND ")); } return StringUtils.join(exprList, " OR "); } /** * build select sql , such as (SELECT ? "A",? "B" FROM DUAL) * * @param column destination column * @return */ public String makeValues(List<String> column) { StringBuilder sb = new StringBuilder("SELECT "); String collect = column.stream() .map(col -> wrapperPlaceholder(col) + DtStringUtil.addQuoteForStr(col)) .collect(Collectors.joining(", ")); sb.append(collect).append(" FROM DUAL"); return sb.toString(); } /** * char type is wrapped with rpad * @param fieldName * @return */ public String wrapperPlaceholder(String fieldName) { int pos = rdbTableInfo.getFieldList().indexOf(fieldName); String type = rdbTableInfo.getFieldTypeList().get(pos); if (StringUtils.contains(type.toLowerCase(), DEAL_CHAR_KEY)) { TableInfo.FieldExtraInfo fieldExtraInfo = rdbTableInfo.getFieldExtraInfoList().get(pos); int charLength = fieldExtraInfo == null ? 0 : fieldExtraInfo.getLength(); if (charLength > 0) { return String.format(RPAD_FORMAT, charLength); } } return SQL_DEFAULT_PLACEHOLDER; } public boolean containsIgnoreCase(List<String> l, String s) { Iterator<String> it = l.iterator(); while (it.hasNext()) { if (it.next().equalsIgnoreCase(s)) return true; } return false; } >>>>>>>
<<<<<<< import org.apache.flink.api.java.tuple.Tuple2; ======= import org.apache.commons.collections.MapUtils; import org.apache.flink.api.common.functions.RuntimeContext; >>>>>>> import org.apache.flink.api.java.tuple.Tuple2; <<<<<<< import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo; ======= import org.apache.flink.streaming.api.operators.StreamingRuntimeContext; import org.apache.flink.streaming.api.operators.async.queue.StreamRecordQueueEntry; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback; import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService; import org.apache.flink.table.runtime.types.CRow; >>>>>>> import org.apache.flink.streaming.api.operators.async.queue.StreamRecordQueueEntry; import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo; <<<<<<< protected void dealFillDataError(ResultFuture<Tuple2<Boolean,Row>> resultFuture, Exception e, Object sourceData) { LOG.debug("source data {} join side table error ", sourceData); LOG.debug("async buid row error..{}", e); ======= @Override public void asyncInvoke(CRow row, ResultFuture<CRow> resultFuture) throws Exception { CRow input = new CRow(Row.copy(row.row()), row.change()); preInvoke(input, resultFuture); Map<String, Object> inputParams = parseInputParam(input); if(MapUtils.isEmpty(inputParams)){ dealMissKey(input, resultFuture); return; } if(isUseCache(inputParams)){ invokeWithCache(inputParams, input, resultFuture); return; } handleAsyncInvoke(inputParams, input, resultFuture); } private Map<String, Object> parseInputParam(CRow input){ Map<String, Object> inputParams = Maps.newHashMap(); for (int i = 0; i < sideInfo.getEqualValIndex().size(); i++) { Integer conValIndex = sideInfo.getEqualValIndex().get(i); Object equalObj = input.row().getField(conValIndex); if(equalObj == null){ return inputParams; } String columnName = sideInfo.getEqualFieldList().get(i); inputParams.put(columnName, equalObj); } return inputParams; } protected boolean isUseCache(Map<String, Object> inputParams){ return openCache() && getFromCache(buildCacheKey(inputParams)) != null; } private void invokeWithCache(Map<String, Object> inputParams, CRow input, ResultFuture<CRow> resultFuture){ if (openCache()) { CacheObj val = getFromCache(buildCacheKey(inputParams)); if (val != null) { if (ECacheContentType.MissVal == val.getType()) { dealMissKey(input, resultFuture); return; }else if(ECacheContentType.SingleLine == val.getType()){ try { Row row = fillData(input.row(), val.getContent()); resultFuture.complete(Collections.singleton(new CRow(row, input.change()))); } catch (Exception e) { dealFillDataError(input, resultFuture, e); } } else if (ECacheContentType.MultiLine == val.getType()) { try { List<CRow> rowList = Lists.newArrayList(); for (Object one : (List) val.getContent()) { Row row = fillData(input.row(), one); rowList.add(new CRow(row, input.change())); } resultFuture.complete(rowList); } catch (Exception e) { dealFillDataError(input, resultFuture, e); } } else { resultFuture.completeExceptionally(new RuntimeException("not support cache obj type " + val.getType())); } return; } } } public abstract void handleAsyncInvoke(Map<String, Object> inputParams, CRow input, ResultFuture<CRow> resultFuture) throws Exception; public abstract String buildCacheKey(Map<String, Object> inputParams); private ProcessingTimeService getProcessingTimeService(){ return ((StreamingRuntimeContext)this.runtimeContext).getProcessingTimeService(); } protected ScheduledFuture<?> registerTimer(CRow input, ResultFuture<CRow> resultFuture){ long timeoutTimestamp = sideInfo.getSideTableInfo().getAsyncTimeout() + getProcessingTimeService().getCurrentProcessingTime(); return getProcessingTimeService().registerTimer( timeoutTimestamp, new ProcessingTimeCallback() { @Override public void onProcessingTime(long timestamp) throws Exception { timeout(input, resultFuture); } }); } protected void cancelTimerWhenComplete(ResultFuture<CRow> resultFuture, ScheduledFuture<?> timerFuture){ ThreadPoolExecutor executors = new ThreadPoolExecutor(1, 1,0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>()); if(resultFuture instanceof StreamRecordQueueEntry){ StreamRecordQueueEntry streamRecordBufferEntry = (StreamRecordQueueEntry) resultFuture; streamRecordBufferEntry.onComplete((Object value) -> { timerFuture.cancel(true); },executors); } } protected void dealFillDataError(CRow input, ResultFuture<CRow> resultFuture, Throwable e) { >>>>>>> @Override public void asyncInvoke(Tuple2<Boolean,Row> row, ResultFuture<Tuple2<Boolean,Row>> resultFuture) throws Exception { Tuple2<Boolean,Row> input = Tuple2.of(row.f0, Row.copy(row.f1)); preInvoke(input, resultFuture); Map<String, Object> inputParams = parseInputParam(input); if(MapUtils.isEmpty(inputParams)){ dealMissKey(input, resultFuture); return; } if(isUseCache(inputParams)){ invokeWithCache(inputParams, input, resultFuture); return; } handleAsyncInvoke(inputParams, input, resultFuture); } private Map<String, Object> parseInputParam(Tuple2<Boolean,Row> input){ Map<String, Object> inputParams = Maps.newHashMap(); for (int i = 0; i < sideInfo.getEqualValIndex().size(); i++) { Integer conValIndex = sideInfo.getEqualValIndex().get(i); Object equalObj = input.f1.getField(conValIndex); if(equalObj == null){ return inputParams; } String columnName = sideInfo.getEqualFieldList().get(i); inputParams.put(columnName, equalObj); } return inputParams; } protected boolean isUseCache(Map<String, Object> inputParams){ return openCache() && getFromCache(buildCacheKey(inputParams)) != null; } private void invokeWithCache(Map<String, Object> inputParams, Tuple2<Boolean,Row> input, ResultFuture<Tuple2<Boolean,Row>> resultFuture){ if (openCache()) { CacheObj val = getFromCache(buildCacheKey(inputParams)); if (val != null) { if (ECacheContentType.MissVal == val.getType()) { dealMissKey(input, resultFuture); return; }else if(ECacheContentType.SingleLine == val.getType()){ try { Row row = fillData(input.f1, val.getContent()); resultFuture.complete(Collections.singleton(Tuple2.of(input.f0, row))); } catch (Exception e) { dealFillDataError(input, resultFuture, e); } } else if (ECacheContentType.MultiLine == val.getType()) { try { List<Tuple2<Boolean,Row>> rowList = Lists.newArrayList(); for (Object one : (List) val.getContent()) { Row row = fillData(input.f1, one); rowList.add(Tuple2.of(input.f0, row)); } resultFuture.complete(rowList); } catch (Exception e) { dealFillDataError(input, resultFuture, e); } } else { resultFuture.completeExceptionally(new RuntimeException("not support cache obj type " + val.getType())); } return; } } } public abstract void handleAsyncInvoke(Map<String, Object> inputParams, Tuple2<Boolean,Row> input, ResultFuture<Tuple2<Boolean,Row>> resultFuture) throws Exception; public abstract String buildCacheKey(Map<String, Object> inputParams); private ProcessingTimeService getProcessingTimeService(){ return ((StreamingRuntimeContext)this.runtimeContext).getProcessingTimeService(); } protected ScheduledFuture<?> registerTimer(Tuple2<Boolean,Row> input, ResultFuture<Tuple2<Boolean,Row>> resultFuture){ long timeoutTimestamp = sideInfo.getSideTableInfo().getAsyncTimeout() + getProcessingTimeService().getCurrentProcessingTime(); return getProcessingTimeService().registerTimer( timeoutTimestamp, new ProcessingTimeCallback() { @Override public void onProcessingTime(long timestamp) throws Exception { timeout(input, resultFuture); } }); } protected void cancelTimerWhenComplete(ResultFuture<Tuple2<Boolean,Row>> resultFuture, ScheduledFuture<?> timerFuture){ ThreadPoolExecutor executors = new ThreadPoolExecutor(1, 1,0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>()); if(resultFuture instanceof StreamRecordQueueEntry){ StreamRecordQueueEntry streamRecordBufferEntry = (StreamRecordQueueEntry) resultFuture; streamRecordBufferEntry.onComplete((Object value) -> { timerFuture.cancel(true); },executors); } } protected void dealFillDataError(Tuple2<Boolean,Row> input, ResultFuture<Tuple2<Boolean,Row>> resultFuture, Throwable e) {
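The merged asyncInvoke path above is: extract join keys, serve from cache when possible, otherwise fire the async lookup and arm a timeout timer that is cancelled when the lookup completes. A framework-free sketch of that flow using plain java.util.concurrent; the names here are illustrative, not the project's API:

    import java.util.Map;
    import java.util.concurrent.*;

    public final class AsyncLookupSketch {

        private static final ScheduledExecutorService TIMER = Executors.newSingleThreadScheduledExecutor();
        private static final Map<String, String> CACHE = new ConcurrentHashMap<>();

        // Serves from cache when possible; otherwise fires the async lookup and arms
        // a timer that fails the future on timeout (the registerTimer idea above).
        static CompletableFuture<String> lookup(String key, long timeoutMs) {
            String cached = CACHE.get(key);
            if (cached != null) {
                return CompletableFuture.completedFuture(cached);
            }
            CompletableFuture<String> result = new CompletableFuture<>();
            ScheduledFuture<?> timeout = TIMER.schedule(
                    () -> result.completeExceptionally(new TimeoutException(key)),
                    timeoutMs, TimeUnit.MILLISECONDS);
            CompletableFuture.supplyAsync(() -> "value-for-" + key)  // stands in for the real I/O
                    .whenComplete((value, err) -> {
                        timeout.cancel(true);  // mirrors cancelTimerWhenComplete
                        if (err != null) {
                            result.completeExceptionally(err);
                        } else {
                            CACHE.put(key, value);
                            result.complete(value);
                        }
                    });
            return result;
        }

        public static void main(String[] args) throws Exception {
            System.out.println(lookup("k1", 1000).get());  // fires the async lookup
            System.out.println(lookup("k1", 1000).get());  // served from the cache
        }
    }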
<<<<<<< public ParamsInfo(String sql, String name, List<URL> jarUrlList, String localSqlPluginPath, String remoteSqlPluginPath, String pluginLoadMode, String deployMode, Properties confProp, String planner) { ======= private Properties dirtyProperties; public ParamsInfo( String sql , String name, List<URL> jarUrlList , String localSqlPluginPath , String remoteSqlPluginPath , String pluginLoadMode , String deployMode , Properties confProp , Properties dirtyProperties) { >>>>>>> private Properties dirtyProperties; public ParamsInfo( String sql , String name , List<URL> jarUrlList , String localSqlPluginPath , String remoteSqlPluginPath , String pluginLoadMode , String deployMode , Properties confProp , Properties dirtyProperties , String planner) { <<<<<<< this.planner = planner; ======= this.dirtyProperties = dirtyProperties; >>>>>>> this.planner = planner; this.dirtyProperties = dirtyProperties; } public static ParamsInfo.Builder builder() { return new ParamsInfo.Builder(); <<<<<<< public String getPlanner() { return planner; } ======= public Properties getDirtyProperties() { return dirtyProperties; } >>>>>>> public Properties getDirtyProperties() { return dirtyProperties; } public String getPlanner() { return planner; } <<<<<<< ", planner=" + planner + ======= ", getPlan=" + getPlan + ", dirtyProperties='" + dirtyProperties + '\'' + >>>>>>> ", getPlan=" + getPlan + ", dirtyProperties='" + dirtyProperties + '\'' + ", planner=" + planner + <<<<<<< private String planner; ======= private Properties dirtyProperties; >>>>>>> private String planner; private Properties dirtyProperties; <<<<<<< public ParamsInfo.Builder setPlanner(String planner) { this.planner = planner; return this; } ======= public ParamsInfo.Builder setDirtyProperties(Properties dirtyProperties) { this.dirtyProperties = dirtyProperties; return this; } >>>>>>> public ParamsInfo.Builder setDirtyProperties(Properties dirtyProperties) { this.dirtyProperties = dirtyProperties; return this; } public ParamsInfo.Builder setPlanner(String planner) { this.planner = planner; return this; } <<<<<<< return new ParamsInfo(sql, name, jarUrlList, localSqlPluginPath, remoteSqlPluginPath, pluginLoadMode, deployMode, confProp, planner); ======= return new ParamsInfo( sql , name , jarUrlList , localSqlPluginPath , remoteSqlPluginPath , pluginLoadMode , deployMode , confProp , dirtyProperties); >>>>>>> return new ParamsInfo( sql , name , jarUrlList , localSqlPluginPath , remoteSqlPluginPath , pluginLoadMode , deployMode , confProp , dirtyProperties , planner );
<<<<<<< import net.minecraft.util.registry.RegistryKey; import net.minecraft.world.World; ======= import net.minecraft.util.registry.Registry; import net.minecraft.world.dimension.DimensionType; >>>>>>> import net.minecraft.util.registry.Registry; import net.minecraft.world.dimension.DimensionType; import net.minecraft.util.registry.RegistryKey; import net.minecraft.world.World; <<<<<<< public static void sendShape(List<ServerPlayerEntity> players, RegistryKey<World> dim, ExpiringShape shape) ======= public static void sendShape(List<ServerPlayerEntity> players, ExpiringShape shape, Map<String, Value> params) >>>>>>> public static void sendShape(List<ServerPlayerEntity> players, ExpiringShape shape, Map<String, Value> params) <<<<<<< if (tag == null) tag = shape.toTag(); tag.putString("dim", dim.getValue().toString()); ======= if (tag == null) tag = ExpiringShape.toTag(params); >>>>>>> if (tag == null) tag = ExpiringShape.toTag(params);
<<<<<<< private static Map<String, DtClassLoader> pluginClassLoader = new ConcurrentHashMap<>(); // classloader map for the side-table plugins; it holds the AppClassLoader instances private static Map<String, URLClassLoader> pluginAppClassLoader = new ConcurrentHashMap<>(); ======= public static void forName(String clazz, ClassLoader classLoader) { synchronized (LOCK) { try { Class.forName(clazz, true, classLoader); DriverManager.setLoginTimeout(10); } catch (Exception e) { throw new RuntimeException(e); } } } public synchronized static void forName(String clazz) { try { Class<?> driverClass = Class.forName(clazz); driverClass.newInstance(); } catch (Exception e) { throw new RuntimeException(e); } } >>>>>>> // classloader map for the side-table plugins; it holds the AppClassLoader instances private static Map<String, URLClassLoader> pluginAppClassLoader = new ConcurrentHashMap<>(); public static void forName(String clazz, ClassLoader classLoader) { synchronized (LOCK) { try { Class.forName(clazz, true, classLoader); DriverManager.setLoginTimeout(10); } catch (Exception e) { throw new RuntimeException(e); } } } public synchronized static void forName(String clazz) { try { Class<?> driverClass = Class.forName(clazz); driverClass.newInstance(); } catch (Exception e) { throw new RuntimeException(e); } }
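The forName(String, ClassLoader) variant kept above matters for plugin isolation: passing initialize=true runs the class's static initializer under the given loader, which is where a conforming JDBC driver registers itself with DriverManager. A minimal sketch of that pattern; the driver name and loader are whatever the plugin supplies:

    import java.sql.DriverManager;

    public final class DriverLoadSketch {
        // initialize=true triggers the static block in which JDBC drivers
        // self-register; the plugin classloader keeps the driver's classes isolated.
        static void loadDriver(String driverClass, ClassLoader pluginLoader) {
            try {
                Class.forName(driverClass, true, pluginLoader);
                DriverManager.setLoginTimeout(10);
            } catch (ClassNotFoundException e) {
                throw new RuntimeException("driver " + driverClass + " not on plugin classpath", e);
            }
        }
    }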
<<<<<<< ======= if (localSqlPluginPath == null) { throw new RuntimeException("need to set localSqlPluginPath"); } >>>>>>> if (localSqlPluginPath == null) { throw new RuntimeException("need to set localSqlPluginPath"); } <<<<<<< ======= >>>>>>> <<<<<<< public void setPluginLoadMode(String pluginLoadMode) { this.pluginLoadMode = pluginLoadMode; } private Table getTableFromCache(Map<String, Table> localTableCache, String tableAlias, String tableName){ ======= private Table getTableFromCache(Map<String, Table> localTableCache, String tableAlias, String tableName) { >>>>>>> public void setPluginLoadMode(String pluginLoadMode) { this.pluginLoadMode = pluginLoadMode; } private Table getTableFromCache(Map<String, Table> localTableCache, String tableAlias, String tableName) { <<<<<<< if(ECacheType.ALL.name().equalsIgnoreCase(sideTableInfo.getCacheType())){ dsOut = SideWithAllCacheOperator.getSideJoinDataStream(adaptStream, sideTableInfo.getType(), localSqlPluginPath, typeInfo, joinInfo, sideJoinFieldInfo, sideTableInfo, pluginLoadMode); }else{ dsOut = SideAsyncOperator.getSideJoinDataStream(adaptStream, sideTableInfo.getType(), localSqlPluginPath, typeInfo, joinInfo, sideJoinFieldInfo, sideTableInfo, pluginLoadMode); ======= if (ECacheType.ALL.name().equalsIgnoreCase(sideTableInfo.getCacheType())) { dsOut = SideWithAllCacheOperator.getSideJoinDataStream(adaptStream, sideTableInfo.getType(), localSqlPluginPath, typeInfo, joinInfo, sideJoinFieldInfo, sideTableInfo); } else { dsOut = SideAsyncOperator.getSideJoinDataStream(adaptStream, sideTableInfo.getType(), localSqlPluginPath, typeInfo, joinInfo, sideJoinFieldInfo, sideTableInfo); >>>>>>> if(ECacheType.ALL.name().equalsIgnoreCase(sideTableInfo.getCacheType())){ dsOut = SideWithAllCacheOperator.getSideJoinDataStream(adaptStream, sideTableInfo.getType(), localSqlPluginPath, typeInfo, joinInfo, sideJoinFieldInfo, sideTableInfo, pluginLoadMode); }else{ dsOut = SideAsyncOperator.getSideJoinDataStream(adaptStream, sideTableInfo.getType(), localSqlPluginPath, typeInfo, joinInfo, sideJoinFieldInfo, sideTableInfo, pluginLoadMode);
<<<<<<< import org.apache.flink.configuration.Configuration; import org.apache.flink.metrics.Counter; ======= >>>>>>>
<<<<<<< import java.util.TimeZone; ======= import java.util.Map; import java.util.TimeZone; >>>>>>> import java.util.Map; import java.util.TimeZone;
<<<<<<< ======= import com.dtstack.flink.sql.util.RowDataComplete; import com.dtstack.flink.sql.util.ThreadUtil; >>>>>>> import com.dtstack.flink.sql.util.ThreadUtil; <<<<<<< protected void preInvoke(Row input, ResultFuture<Row> resultFuture) { } ======= protected void preInvoke(Row input, ResultFuture<BaseRow> resultFuture) { } >>>>>>> protected void preInvoke(Row input, ResultFuture<Row> resultFuture) { } <<<<<<< Map<String, Object> inputParams, Row input, ResultFuture<Row> resultFuture, SQLClient rdbSqlClient, AtomicLong failCounter, AtomicBoolean finishFlag, CountDownLatch latch) { ======= Map<String, Object> inputParams, Row input, ResultFuture<BaseRow> resultFuture, SQLClient rdbSqlClient, AtomicLong failCounter, AtomicBoolean finishFlag, CountDownLatch latch) { >>>>>>> Map<String, Object> inputParams, Row input, ResultFuture<Row> resultFuture, SQLClient rdbSqlClient, AtomicLong failCounter, AtomicBoolean finishFlag, CountDownLatch latch) {
<<<<<<< import com.dtstack.flink.sql.side.AbstractSideTableInfo; import com.dtstack.flink.sql.side.BaseAsyncReqRow; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.table.runtime.types.CRow; import org.apache.flink.types.Row; ======= >>>>>>> import com.dtstack.flink.sql.side.AbstractSideTableInfo; import com.dtstack.flink.sql.side.BaseAsyncReqRow; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.table.runtime.types.CRow; import org.apache.flink.types.Row; <<<<<<< import com.dtstack.flink.sql.side.CacheMissVal; import com.dtstack.flink.sql.side.FieldInfo; import com.dtstack.flink.sql.side.JoinInfo; ======= import com.dtstack.flink.sql.side.*; >>>>>>> import com.dtstack.flink.sql.side.CacheMissVal; import com.dtstack.flink.sql.side.FieldInfo; import com.dtstack.flink.sql.side.JoinInfo; <<<<<<< default: ======= default: break; >>>>>>> default: break;
<<<<<<< import org.apache.flink.api.java.tuple.Tuple2; ======= import org.apache.commons.lang3.StringUtils; >>>>>>> import org.apache.flink.api.java.tuple.Tuple2; import org.apache.commons.lang3.StringUtils; <<<<<<< public void asyncInvoke(Tuple2<Boolean,Row> input, ResultFuture<Tuple2<Boolean,Row>> resultFuture) throws Exception { Tuple2<Boolean,Row> inputCopy = Tuple2.of(input.f0,input.f1); Map<String, Object> refData = Maps.newHashMap(); for (int i = 0; i < sideInfo.getEqualValIndex().size(); i++) { Integer conValIndex = sideInfo.getEqualValIndex().get(i); Object equalObj = inputCopy.f1.getField(conValIndex); if(equalObj == null){ dealMissKey(inputCopy, resultFuture); return; } refData.put(sideInfo.getEqualFieldList().get(i), equalObj); } String rowKeyStr = ((HbaseAsyncSideInfo)sideInfo).getRowKeyBuilder().getRowKey(refData); //get from cache if (openCache()) { CacheObj val = getFromCache(rowKeyStr); if (val != null) { if (ECacheContentType.MissVal == val.getType()) { dealMissKey(inputCopy, resultFuture); return; } else if (ECacheContentType.SingleLine == val.getType()) { try { Row row = fillData(inputCopy.f1, val); resultFuture.complete(Collections.singleton(Tuple2.of(inputCopy.f0,row))); } catch (Exception e) { dealFillDataError(resultFuture, e, inputCopy); } } else if (ECacheContentType.MultiLine == val.getType()) { try { for (Object one : (List) val.getContent()) { Row row = fillData(inputCopy.f1, one); resultFuture.complete(Collections.singleton(Tuple2.of(inputCopy.f0,row))); } } catch (Exception e) { dealFillDataError(resultFuture, e, inputCopy); } } return; } } ======= public void handleAsyncInvoke(Map<String, Object> inputParams, CRow input, ResultFuture<CRow> resultFuture) throws Exception { rowKeyMode.asyncGetData(tableName, buildCacheKey(inputParams), input, resultFuture, sideInfo.getSideCache()); } >>>>>>> public void handleAsyncInvoke(Map<String, Object> inputParams, Tuple2<Boolean,Row> input, ResultFuture<Tuple2<Boolean,Row>> resultFuture) throws Exception { rowKeyMode.asyncGetData(tableName, buildCacheKey(inputParams), input, resultFuture, sideInfo.getSideCache()); }
<<<<<<< if(matcher.find()) { tableName = matcher.group(3); selectSql = "select " + matcher.group(4); ======= if (matcher.find()) { tableName = matcher.group(1); selectSql = "select " + matcher.group(2); >>>>>>> if (matcher.find()) { tableName = matcher.group(1); selectSql = "select " + matcher.group(2); <<<<<<< } } private static void parseNode(SqlNode sqlNode, CreateTmpTableParser.SqlParserResult sqlParseResult){ SqlKind sqlKind = sqlNode.getKind(); switch (sqlKind){ case SELECT: SqlNode sqlFrom = ((SqlSelect)sqlNode).getFrom(); if(sqlFrom.getKind() == IDENTIFIER){ sqlParseResult.addSourceTable(sqlFrom.toString()); }else{ parseNode(sqlFrom, sqlParseResult); } break; case JOIN: SqlNode leftNode = ((SqlJoin)sqlNode).getLeft(); SqlNode rightNode = ((SqlJoin)sqlNode).getRight(); if(leftNode.getKind() == IDENTIFIER){ sqlParseResult.addSourceTable(leftNode.toString()); }else{ parseNode(leftNode, sqlParseResult); } if(rightNode.getKind() == IDENTIFIER){ sqlParseResult.addSourceTable(rightNode.toString()); }else{ parseNode(rightNode, sqlParseResult); } break; case AS: //不解析column,所以 as 相关的都是表 SqlNode identifierNode = ((SqlBasicCall)sqlNode).getOperands()[0]; if(identifierNode.getKind() != IDENTIFIER){ parseNode(identifierNode, sqlParseResult); }else { sqlParseResult.addSourceTable(identifierNode.toString()); } break; case UNION: SqlNode unionLeft = ((SqlBasicCall)sqlNode).getOperands()[0]; SqlNode unionRight = ((SqlBasicCall)sqlNode).getOperands()[1]; if(unionLeft.getKind() == IDENTIFIER){ sqlParseResult.addSourceTable(unionLeft.toString()); }else{ parseNode(unionLeft, sqlParseResult); } if(unionRight.getKind() == IDENTIFIER){ sqlParseResult.addSourceTable(unionRight.toString()); }else{ parseNode(unionRight, sqlParseResult); } break; case MATCH_RECOGNIZE: SqlMatchRecognize node = (SqlMatchRecognize) sqlNode; sqlParseResult.addSourceTable(node.getTableRef().toString()); break; case SNAPSHOT: SqlSnapshot sqlSnapshot = (SqlSnapshot) sqlNode; sqlParseResult.addSourceTable(sqlSnapshot.getTableRef().toString()); break; default: //do nothing break; ======= >>>>>>>
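The group-index change above (group(3)/group(4) to group(1)/group(2)) is the classic regex off-by-one that appears when a pattern gains or loses capturing groups. Named groups sidestep it entirely; a sketch with a hypothetical CREATE VIEW pattern, since the record does not show the real one:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public final class ViewParseSketch {
        // Hypothetical pattern: named groups keep extraction stable even if
        // the pattern later gains extra groups.
        private static final Pattern CREATE_VIEW = Pattern.compile(
                "(?i)create\\s+view\\s+(?<name>\\w+)\\s+as\\s+select\\s+(?<body>.+)");

        public static void main(String[] args) {
            Matcher m = CREATE_VIEW.matcher("CREATE VIEW v AS SELECT id, name FROM src");
            if (m.find()) {
                String tableName = m.group("name");             // v
                String selectSql = "select " + m.group("body"); // select id, name FROM src
                System.out.println(tableName + " -> " + selectSql);
            }
        }
    }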
<<<<<<< if (ToolHelper.damageTool(slots[SLOT_PICKAXE], worldObj, this.pos.getX(), this.pos.getY(), this.pos.getZ())) { slots[SLOT_PICKAXE] = null; ======= if (ToolHelper.damageTool(slots[SLOT_PICKAXE], worldObj, this.xCoord, this.yCoord, this.zCoord)) { destroyTool(SLOT_PICKAXE); >>>>>>> if (ToolHelper.damageTool(slots[SLOT_PICKAXE], worldObj, this.pos.getX(), this.pos.getY(), this.pos.getZ())) { destroyTool(SLOT_PICKAXE);
<<<<<<< ======= import carpet.CarpetSettings; >>>>>>> import carpet.CarpetSettings;
<<<<<<< }*/ ======= } @Override public void addPotionEffect(PotionEffect p_70690_1_) { } >>>>>>> }*/ @Override public void addPotionEffect(PotionEffect p_70690_1_) { }
<<<<<<< //registerMod(new Pams()); //registerMod(new Pneumaticcraft()); ======= registerMod(new ImmersiveEngineering()); registerMod(new Pams()); registerMod(new Pneumaticcraft()); >>>>>>> //registerMod(new ImmersiveEngineering()); //registerMod(new Pams()); //registerMod(new Pneumaticcraft()); <<<<<<< //registerMod(new AgriCraft()); //registerMod(new GrowOres()); ======= registerMod(new AgriCraft()); registerMod(new GrowOres()); registerMod(new ThaumCraft()); >>>>>>> //registerMod(new AgriCraft()); //registerMod(new GrowOres()); //registerMod(new ThaumCraft());
<<<<<<< if (ToolHelper.damageTool(slots[SLOT_SWORD], worldObj, pos.getX(), pos.getY(), pos.getZ())) { slots[SLOT_SWORD] = null; ======= if (ToolHelper.damageTool(slots[SLOT_SWORD], worldObj, xCoord, yCoord, zCoord)) { destroyTool(SLOT_SWORD); >>>>>>> if (ToolHelper.damageTool(slots[SLOT_SWORD], worldObj, pos.getX(), pos.getY(), pos.getZ())) { destroyTool(SLOT_SWORD);
<<<<<<< import static org.assertj.core.util.Sets.newHashSet; ======= import static org.assertj.core.util.Sets.newLinkedHashSet; >>>>>>> import static org.assertj.core.util.Sets.newHashSet; import static org.assertj.core.util.Sets.newLinkedHashSet; <<<<<<< public void allMatch_iterable_assertion_example() { List<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).allMatch(character -> character.getRace() == HOBBIT, "hobbits"); } @Test public void allSatisfy_iterable_assertion_example() { List<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).allSatisfy(character -> { assertThat(character.getRace()).isEqualTo(HOBBIT); assertThat(character.getName()).isNotEqualTo("Sauron"); }); } @Test public void anySatisfy_iterable_assertion_example() { List<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).anySatisfy(character -> { assertThat(character.getRace()).isEqualTo(HOBBIT); assertThat(character.age).isLessThan(30); }); } @Test public void iterable_assertions_comparing_elements_field_by_field_example() { // this is useful if elements don't have a good equals method implementation. Employee bill = new Employee("Bill", 60, "Micro$oft"); final List<Employee> micro$oftEmployees = newArrayList(bill); Employee appleBill = new Employee("Bill", 60, "Apple"); // this assertion should fail as the company differs but it passes since Employee equals ignores company fields. assertThat(micro$oftEmployees).contains(appleBill); // let's make the assertion fails by comparing all Employee's fields instead of using equals. try { assertThat(micro$oftEmployees).usingFieldByFieldElementComparator().contains(appleBill); } catch (AssertionError e) { logAssertionErrorMessage("contains for Iterable using field by field element comparator", e); } // if we don't compare company, appleBill is equivalent to bill. assertThat(micro$oftEmployees).usingElementComparatorIgnoringFields("company").contains(appleBill); // if we compare only name and company, youngBill is equivalent to bill ... Employee youngBill = new Employee("Bill", 25, "Micro$oft"); assertThat(micro$oftEmployees).usingElementComparatorOnFields("company").contains(youngBill); // ... but not if we compare only age. try { assertThat(micro$oftEmployees).usingElementComparatorOnFields("age").contains(youngBill); } catch (AssertionError e) { logAssertionErrorMessage("contains for Iterable usingElementComparatorOnFields", e); } // another example with usingElementComparatorOnFields TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT); TolkienCharacter sam = new TolkienCharacter("Sam", 38, HOBBIT); // frodo and sam both are hobbits, so they are equals when comparing only race ... assertThat(newArrayList(frodo)).usingElementComparatorOnFields("race").contains(sam); assertThat(newArrayList(frodo)).usingElementComparatorOnFields("race").isEqualTo(newArrayList(sam)); // ... but not when comparing both name and race try { assertThat(newArrayList(frodo)).usingElementComparatorOnFields("name", "race").contains(sam); } catch (AssertionError e) { logAssertionErrorMessage("contains for Iterable usingElementComparatorOnFields", e); } } @Test ======= >>>>>>> public void allMatch_iterable_assertion_example() { List<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).allMatch(character -> character.getRace() == HOBBIT, "hobbits"); } @Test public void allSatisfy_iterable_assertion_example() { List<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).allSatisfy(character -> { assertThat(character.getRace()).isEqualTo(HOBBIT); assertThat(character.getName()).isNotEqualTo("Sauron"); }); } @Test public void anySatisfy_iterable_assertion_example() { List<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).anySatisfy(character -> { assertThat(character.getRace()).isEqualTo(HOBBIT); assertThat(character.age).isLessThan(30); }); } @Test public void iterable_assertions_comparing_elements_field_by_field_example() { // this is useful if elements don't have a good equals method implementation. Employee bill = new Employee("Bill", 60, "Micro$oft"); final List<Employee> micro$oftEmployees = newArrayList(bill); Employee appleBill = new Employee("Bill", 60, "Apple"); // this assertion should fail as the company differs but it passes since Employee equals ignores company fields. assertThat(micro$oftEmployees).contains(appleBill); // let's make the assertion fails by comparing all Employee's fields instead of using equals. try { assertThat(micro$oftEmployees).usingFieldByFieldElementComparator().contains(appleBill); } catch (AssertionError e) { logAssertionErrorMessage("contains for Iterable using field by field element comparator", e); } // if we don't compare company, appleBill is equivalent to bill. assertThat(micro$oftEmployees).usingElementComparatorIgnoringFields("company").contains(appleBill); // if we compare only name and company, youngBill is equivalent to bill ... Employee youngBill = new Employee("Bill", 25, "Micro$oft"); assertThat(micro$oftEmployees).usingElementComparatorOnFields("company").contains(youngBill); // ... but not if we compare only age. try { assertThat(micro$oftEmployees).usingElementComparatorOnFields("age").contains(youngBill); } catch (AssertionError e) { logAssertionErrorMessage("contains for Iterable usingElementComparatorOnFields", e); } // another example with usingElementComparatorOnFields TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT); TolkienCharacter sam = new TolkienCharacter("Sam", 38, HOBBIT); // frodo and sam both are hobbits, so they are equals when comparing only race ... assertThat(newArrayList(frodo)).usingElementComparatorOnFields("race").contains(sam); assertThat(newArrayList(frodo)).usingElementComparatorOnFields("race").isEqualTo(newArrayList(sam)); // ... but not when comparing both name and race try { assertThat(newArrayList(frodo)).usingElementComparatorOnFields("name", "race").contains(sam); } catch (AssertionError e) { logAssertionErrorMessage("contains for Iterable usingElementComparatorOnFields", e); } } @Test <<<<<<< @Test public void should_not_produce_warning_for_varargs_parameter() { List<Entry<String, String>> list = new ArrayList<>(); list.add(Pair.of("A", "B")); assertThat(list).containsExactly(Pair.of("A", "B")); } ======= @Test public void test_issue_656() { Iterator<String> iterator = new ArrayList<String>().iterator(); assertThat(iterator).isSameAs(iterator); } @Test public void test_navigation_with_iterable() { Iterable<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).first().isEqualTo(frodo); assertThat(hobbits).element(1).isEqualTo(sam); assertThat(hobbits).last().isEqualTo(pippin); Iterable<String> hobbitsName = newArrayList("frodo", "sam", "pippin"); // assertion succeeds assertThat(hobbitsName, StringAssert.class).first() .startsWith("fro") .endsWith("do"); assertThat(hobbitsName, StringAssert.class).element(1).contains("a"); assertThat(hobbitsName, StringAssert.class).last().endsWith("in"); } @Test public void test_navigation_with_list() { List<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).first().isEqualTo(frodo); assertThat(hobbits).element(1).isEqualTo(sam); assertThat(hobbits).last().isEqualTo(pippin); } @Test public void test_navigable_size_assertions() { Iterable<Ring> elvesRings = newArrayList(vilya, nenya, narya); // assertion will pass: assertThat(elvesRings).size() .isGreaterThan(1) .isLessThanOrEqualTo(3) .returnToIterable() .contains(narya) .doesNotContain(oneRing); } @Test public void should_not_produce_warning_for_varargs_parameter() { List<Entry<String, String>> list = new ArrayList<>(); list.add(Pair.of("A", "B")); assertThat(list).containsExactly(Pair.of("A", "B")); } @Test public void should_not_forget_assertion_description() { try { assertThat(fellowshipOfTheRing).as("check hobbits") .extracting("name") .contains(sauron); } catch (AssertionError error) { assertThat(error).hasMessageContaining("check hobbits"); } } >>>>>>> @Test public void test_issue_656() { Iterator<String> iterator = new ArrayList<String>().iterator(); assertThat(iterator).isSameAs(iterator); } @Test public void test_navigation_with_iterable() { Iterable<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).first().isEqualTo(frodo); assertThat(hobbits).element(1).isEqualTo(sam); assertThat(hobbits).last().isEqualTo(pippin); Iterable<String> hobbitsName = newArrayList("frodo", "sam", "pippin"); // assertion succeeds assertThat(hobbitsName, StringAssert.class).first() .startsWith("fro") .endsWith("do"); assertThat(hobbitsName, StringAssert.class).element(1).contains("a"); assertThat(hobbitsName, StringAssert.class).last().endsWith("in"); } @Test public void test_navigation_with_list() { List<TolkienCharacter> hobbits = newArrayList(frodo, sam, pippin); assertThat(hobbits).first().isEqualTo(frodo); assertThat(hobbits).element(1).isEqualTo(sam); assertThat(hobbits).last().isEqualTo(pippin); } @Test public void test_navigable_size_assertions() { Iterable<Ring> elvesRings = newArrayList(vilya, nenya, narya); // assertion will pass: assertThat(elvesRings).size() .isGreaterThan(1) .isLessThanOrEqualTo(3) .returnToIterable() .contains(narya) .doesNotContain(oneRing); } @Test public void should_not_produce_warning_for_varargs_parameter() { List<Entry<String, String>> list = new ArrayList<>(); list.add(Pair.of("A", "B")); assertThat(list).containsExactly(Pair.of("A", "B")); } @Test public void should_not_forget_assertion_description() { try { assertThat(fellowshipOfTheRing).as("check hobbits") .extracting("name") .contains(sauron); } catch (AssertionError error) { assertThat(error).hasMessageContaining("check hobbits"); } }
<<<<<<< @Test public void allSatisfy_example() { Map<TolkienCharacter, Ring> elvesRingBearers = new HashMap<>(); elvesRingBearers.put(galadriel, nenya); elvesRingBearers.put(gandalf, narya); elvesRingBearers.put(elrond, vilya); assertThat(elvesRingBearers).allSatisfy((character, ring) -> { assertThat(character.getRace()).isIn(ELF, MAIA); assertThat(ring).isIn(nenya, narya, vilya); }); } ======= @Test public void map_flatExtracting_examples() { Map<String, List<BasketBallPlayer>> teams = new HashMap<>(); teams.put("spurs", asList(tonyParker, ginobili)); teams.put("cavs", asList(james, dwayne)); assertThat(teams).flatExtracting("spurs", "cavs") .containsExactly(tonyParker, ginobili, james, dwayne); } >>>>>>> @Test public void allSatisfy_example() { Map<TolkienCharacter, Ring> elvesRingBearers = new HashMap<>(); elvesRingBearers.put(galadriel, nenya); elvesRingBearers.put(gandalf, narya); elvesRingBearers.put(elrond, vilya); assertThat(elvesRingBearers).allSatisfy((character, ring) -> { assertThat(character.getRace()).isIn(ELF, MAIA); assertThat(ring).isIn(nenya, narya, vilya); }); } @Test public void map_flatExtracting_examples() { Map<String, List<BasketBallPlayer>> teams = new HashMap<>(); teams.put("spurs", asList(tonyParker, ginobili)); teams.put("cavs", asList(james, dwayne)); assertThat(teams).flatExtracting("spurs", "cavs") .containsExactly(tonyParker, ginobili, james, dwayne); }
<<<<<<< @org.assertj.core.util.CheckReturnValue public org.assertj.examples.data.neo4j.DragonBallGraphAssert assertThat(org.assertj.examples.data.neo4j.DragonBallGraph actual) { return proxy(org.assertj.examples.data.neo4j.DragonBallGraphAssert.class, org.assertj.examples.data.neo4j.DragonBallGraph.class, actual); ======= public org.assertj.examples.data.neo4j.DragonBallGraphRepositoryAssert assertThat(org.assertj.examples.data.neo4j.DragonBallGraphRepository actual) { return proxy(org.assertj.examples.data.neo4j.DragonBallGraphRepositoryAssert.class, org.assertj.examples.data.neo4j.DragonBallGraphRepository.class, actual); >>>>>>> @org.assertj.core.util.CheckReturnValue public org.assertj.examples.data.neo4j.DragonBallGraphRepositoryAssert assertThat(org.assertj.examples.data.neo4j.DragonBallGraphRepository actual) { return proxy(org.assertj.examples.data.neo4j.DragonBallGraphRepositoryAssert.class, org.assertj.examples.data.neo4j.DragonBallGraphRepository.class, actual);
<<<<<<< import static org.assertj.core.api.Assertions.atIndex; import static org.assertj.core.util.Lists.newArrayList; ======= import static org.assertj.examples.data.Race.ELF; >>>>>>> import static org.assertj.core.api.Assertions.atIndex; import static org.assertj.core.util.Lists.newArrayList; import static org.assertj.examples.data.Race.ELF; <<<<<<< import java.util.List; import java.util.Map; import org.assertj.core.api.Assertions; import org.assertj.core.api.AssertionsForInterfaceTypes; ======= import java.util.ArrayList; import java.util.Comparator; import java.util.List; import org.assertj.core.api.Assertions; >>>>>>> import java.util.List; import java.util.ArrayList; import java.util.Comparator; import java.util.Map; import org.assertj.core.api.Assertions; import org.assertj.core.api.AssertionsForInterfaceTypes; import org.assertj.core.util.Lists; <<<<<<< Assertions.setAllowComparingPrivateFields(true); } @Test public void extracting_object_values() { assertThat(frodo).extracting(TolkienCharacter::getName, character -> character.age, character -> character.getRace().getName()) .containsExactly("Frodo", 33, "Hobbit"); } @Test public void as_and_list_or_map() { List<Ring> elvesRings = newArrayList(vilya, nenya, narya); assertThat(elvesRings).as("abc").isNotEmpty(); assertThat(elvesRings).as("abc").contains(vilya, atIndex(0)); assertThat(elvesRings).as("abc %d %d", 1, 2).isNotEmpty(); assertThat(ringBearers).as("map").hasSize(4); assertThat(ringBearers).as("map %s", "size").hasSize(4); assertThat(ringBearers).as("map %s", "keys").containsOnlyKeys(vilya, nenya, narya, oneRing); assertThat(ringBearers).as("map").containsOnlyKeys(vilya, nenya, narya, oneRing); Map<String, String> map = ImmutableMap.of("Key1", "Value1", "Key2", "Value2"); assertThat(map).as("").containsOnlyKeys("Key1", "Key2"); Map map1 = new java.util.HashMap<>(); map1.put("Key1","Value1"); map1.put("Key2","Value2"); Assertions.assertThat(map1).containsOnlyKeys("Key1","Key2"); ======= Assertions.setAllowComparingPrivateFields(true); } @Test public void has_field_or_property_examples() { assertThat(frodo).hasFieldOrProperty("age"); // private field are found unless Assertions.setAllowExtractingPrivateFields(false); assertThat(frodo).hasFieldOrProperty("notAccessibleField"); assertThat(frodo).hasFieldOrPropertyWithValue("age", 33); assertThat(frodo).hasFieldOrProperty("race.name"); assertThat(frodo).hasFieldOrPropertyWithValue("race.name", "Hobbit"); } @Test public void extracting_field_or_property_examples() { assertThat(frodo).extracting("name", "age", "race.name") .containsExactly("Frodo", 33, "Hobbit"); } @Test public void field_by_field_comparison_with_specific_comparator_by_type_or_field_examples() { TolkienCharacter olderFrodo = new TolkienCharacter("Frodo", 35, HOBBIT); Assertions.setAllowComparingPrivateFields(false); // ignore notAccessibleField in comparison // specify a comparator for a single field : age assertThat(frodo).usingComparatorForFields(new AtPrecisionComparator<Integer>(2), "age") .isEqualToComparingFieldByField(olderFrodo) .isEqualToComparingOnlyGivenFields(olderFrodo, "age"); // specify a comparator for a field type : Integer assertThat(frodo).usingComparatorForType(new AtPrecisionComparator<Integer>(2), Integer.class) .isEqualToComparingFieldByField(olderFrodo) .isEqualToComparingOnlyGivenFields(olderFrodo, "age"); // field comparators take precendence over field type comparators assertThat(frodo).usingComparatorForFields(new AtPrecisionComparator<Integer>(2), "age") .usingComparatorForType(new 
AtPrecisionComparator<Integer>(1), Integer.class) .isEqualToComparingFieldByField(olderFrodo); TolkienCharacter elfFrodo = new TolkienCharacter("Frodo", 33, ELF); assertThat(frodo).usingComparatorForFields(new Comparator<String>() { @Override public int compare(String o1, String o2) { return 0; } }, "race.name").isEqualToComparingOnlyGivenFields(elfFrodo); Assertions.setAllowComparingPrivateFields(true); } @Test public void usingFieldByFieldElementComparatorTest() throws Exception { List<Animal> animals = new ArrayList<>(); Bird bird = new Bird("White"); Snake snake = new Snake(15); animals.add(bird); animals.add(snake); assertThat(animals).usingFieldByFieldElementComparator() .containsExactly(bird, snake); } private class Animal { private final String name; private Animal(String name) { this.name = name; } @SuppressWarnings("unused") public String getName() { return name; } >>>>>>> Assertions.setAllowComparingPrivateFields(true); } @Test public void extracting_object_values() { assertThat(frodo).extracting(TolkienCharacter::getName, character -> character.age, character -> character.getRace().getName()) .containsExactly("Frodo", 33, "Hobbit"); } @Test public void has_field_or_property_examples() { assertThat(frodo).hasFieldOrProperty("age"); // private field are found unless Assertions.setAllowExtractingPrivateFields(false); assertThat(frodo).hasFieldOrProperty("notAccessibleField"); assertThat(frodo).hasFieldOrPropertyWithValue("age", 33); assertThat(frodo).hasFieldOrProperty("race.name"); assertThat(frodo).hasFieldOrPropertyWithValue("race.name", "Hobbit"); } @Test public void extracting_field_or_property_examples() { assertThat(frodo).extracting("name", "age", "race.name") .containsExactly("Frodo", 33, "Hobbit"); } @SuppressWarnings("unchecked") @Test public void as_and_list_or_map() { List<Ring> elvesRings = newArrayList(vilya, nenya, narya); assertThat(elvesRings).as("abc").isNotEmpty(); assertThat(elvesRings).as("abc").contains(vilya, atIndex(0)); assertThat(elvesRings).as("abc %d %d", 1, 2).isNotEmpty(); assertThat(ringBearers).as("map").hasSize(4); assertThat(ringBearers).as("map %s", "size").hasSize(4); assertThat(ringBearers).as("map %s", "keys").containsOnlyKeys(vilya, nenya, narya, oneRing); assertThat(ringBearers).as("map").containsOnlyKeys(vilya, nenya, narya, oneRing); Map<String, String> map = ImmutableMap.of("Key1", "Value1", "Key2", "Value2"); assertThat(map).as("").containsOnlyKeys("Key1", "Key2"); @SuppressWarnings("rawtypes") Map map1 = new java.util.HashMap<>(); map1.put("Key1","Value1"); map1.put("Key2","Value2"); Assertions.assertThat(map1).containsOnlyKeys("Key1","Key2"); } @Test public void field_by_field_comparison_with_specific_comparator_by_type_or_field_examples() { TolkienCharacter olderFrodo = new TolkienCharacter("Frodo", 35, HOBBIT); Assertions.setAllowComparingPrivateFields(false); // ignore notAccessibleField in comparison // specify a comparator for a single field : age assertThat(frodo).usingComparatorForFields(new AtPrecisionComparator<Integer>(2), "age") .isEqualToComparingFieldByField(olderFrodo) .isEqualToComparingOnlyGivenFields(olderFrodo, "age"); // specify a comparator for a field type : Integer assertThat(frodo).usingComparatorForType(new AtPrecisionComparator<Integer>(2), Integer.class) .isEqualToComparingFieldByField(olderFrodo) .isEqualToComparingOnlyGivenFields(olderFrodo, "age"); // field comparators take precendence over field type comparators assertThat(frodo).usingComparatorForFields(new AtPrecisionComparator<Integer>(2), "age") 
.usingComparatorForType(new AtPrecisionComparator<Integer>(1), Integer.class) .isEqualToComparingFieldByField(olderFrodo); TolkienCharacter elfFrodo = new TolkienCharacter("Frodo", 33, ELF); assertThat(frodo).usingComparatorForFields(new Comparator<String>() { @Override public int compare(String o1, String o2) { return 0; } }, "race.name").isEqualToComparingOnlyGivenFields(elfFrodo); Assertions.setAllowComparingPrivateFields(true); } @Test public void usingFieldByFieldElementComparatorTest() throws Exception { List<Animal> animals = new ArrayList<>(); Bird bird = new Bird("White"); Snake snake = new Snake(15); animals.add(bird); animals.add(snake); assertThat(animals).usingFieldByFieldElementComparator() .containsExactly(bird, snake); } private class Animal { private final String name; private Animal(String name) { this.name = name; } @SuppressWarnings("unused") public String getName() { return name; }
<<<<<<< this.timeReference = this.field_4557 = Util.getMeasuringTimeMs(); ======= this.timeReference = this.field_4557 = SystemUtil.getMeasuringTimeMs(); carpetMsptAccum = TickSpeed.mspt; >>>>>>> this.timeReference = this.field_4557 = Util.getMeasuringTimeMs(); carpetMsptAccum = TickSpeed.mspt; <<<<<<< mspt = TickSpeed.mspt; // regular tick long_1 = Util.getMeasuringTimeMs() - this.timeReference; ======= if (Math.abs(carpetMsptAccum - TickSpeed.mspt) > 1.0f) { // Tickrate changed. Ensure that we use the correct value. carpetMsptAccum = TickSpeed.mspt; } msThisTick = (long)carpetMsptAccum; // regular tick carpetMsptAccum += TickSpeed.mspt - msThisTick; long_1 = SystemUtil.getMeasuringTimeMs() - this.timeReference; >>>>>>> if (Math.abs(carpetMsptAccum - TickSpeed.mspt) > 1.0f) { // Tickrate changed. Ensure that we use the correct value. carpetMsptAccum = TickSpeed.mspt; } msThisTick = (long)carpetMsptAccum; // regular tick carpetMsptAccum += TickSpeed.mspt - msThisTick; long_1 = Util.getMeasuringTimeMs() - this.timeReference; <<<<<<< this.field_19248 = Math.max(Util.getMeasuringTimeMs() + /*50L*/ mspt, this.timeReference); ======= this.field_19248 = Math.max(SystemUtil.getMeasuringTimeMs() + /*50L*/ msThisTick, this.timeReference); >>>>>>> this.field_19248 = Math.max(Util.getMeasuringTimeMs() + /*50L*/ msThisTick, this.timeReference);
<<<<<<< * {@link DragonBallGraph} specific assertions - Generated by CustomAssertionGenerator. ======= * {@link DragonBallGraphRepository} specific assertions - Generated by me. >>>>>>> * {@link DragonBallGraphRepository} specific assertions - Generated by me. <<<<<<< @javax.annotation.Generated(value="assertj-assertions-generator") public class DragonBallGraphAssert extends AbstractDragonBallGraphAssert<DragonBallGraphAssert, DragonBallGraph> { ======= public class DragonBallGraphAssert extends AbstractDragonBallGraphAssert<DragonBallGraphAssert, DragonBallGraphRepository> { >>>>>>> public class DragonBallGraphAssert extends AbstractDragonBallGraphAssert<DragonBallGraphAssert, DragonBallGraphRepository> { <<<<<<< @org.assertj.core.util.CheckReturnValue public static DragonBallGraphAssert assertThat(DragonBallGraph actual) { ======= public static DragonBallGraphAssert assertThat(DragonBallGraphRepository actual) { >>>>>>> @org.assertj.core.util.CheckReturnValue public static DragonBallGraphAssert assertThat(DragonBallGraphRepository actual) {
<<<<<<< if (!isShared && (module == null || module.getName() == null)) return null; File dataFile = CarpetServer.minecraft_server.getSavePath(WorldSavePath.ROOT).resolve("scripts/"+getDescriptor(module, file, isShared)+".nbt").toFile(); ======= File dataFile = resolveResource(module, file, "nbt", isShared); if (dataFile == null) return null; >>>>>>> File dataFile = resolveResource(module, file, "nbt", isShared); if (dataFile == null) return null; <<<<<<< if (!isShared && (module == null || module.getName() == null)) return false; File dataFile =CarpetServer.minecraft_server.getSavePath(WorldSavePath.ROOT).resolve("scripts/"+getDescriptor(module, file, isShared)+".nbt").toFile(); ======= File dataFile = resolveResource(module, file, "nbt", isShared); if (dataFile == null) return false; if (!Files.exists(dataFile.toPath().getParent()) && !dataFile.getParentFile().mkdirs()) return false; synchronized (writeIOSync) { return FileModule.write(globalState, dataFile); } } public static boolean appendToTextFile(Module module, String resourceName, String type, boolean isShared, List<String> message) { File dataFile = resolveResource(module, resourceName, "txt", isShared); if (dataFile == null) return false; >>>>>>> File dataFile = resolveResource(module, file, "nbt", isShared); if (dataFile == null) return false; if (!Files.exists(dataFile.toPath().getParent()) && !dataFile.getParentFile().mkdirs()) return false; synchronized (writeIOSync) { return FileModule.write(globalState, dataFile); } } public static boolean appendToTextFile(Module module, String resourceName, String type, boolean isShared, List<String> message) { File dataFile = resolveResource(module, resourceName, "txt", isShared); if (dataFile == null) return false;
<<<<<<< DP3T.sendIAmInfected(context, onsetDate, new ExposeeAuthData(codeInputBase64), new CallbackListener<Void>() { ======= DP3T.sendIWasExposed(context, onsetDate, new ExposeeAuthData(codeInputBase64), null, new CallbackListener<Void>() { >>>>>>> DP3T.sendIAmInfected(context, onsetDate, new ExposeeAuthData(codeInputBase64), null, new CallbackListener<Void>() {
<<<<<<< import net.minecraft.client.render.VertexConsumerProvider; import net.minecraft.client.world.ClientWorld; ======= >>>>>>> import net.minecraft.client.render.VertexConsumerProvider; import net.minecraft.client.world.ClientWorld; <<<<<<< import net.minecraft.util.registry.Registry; import net.minecraft.util.registry.RegistryKey; import net.minecraft.world.World; ======= import net.minecraft.world.IWorld; import net.minecraft.world.dimension.DimensionType; >>>>>>> import net.minecraft.util.registry.RegistryKey; import net.minecraft.world.World; <<<<<<< private final Map<RegistryKey<World>, Int2ObjectOpenHashMap<RenderedShape<? extends ShapeDispatcher.ExpiringShape>>> shapes; ======= private final Map<DimensionType, Long2ObjectOpenHashMap<RenderedShape<? extends ShapeDispatcher.ExpiringShape>>> shapes; >>>>>>> private final Map<RegistryKey<World>, Long2ObjectOpenHashMap<RenderedShape<? extends ShapeDispatcher.ExpiringShape>>> shapes; <<<<<<< shapes.put(World.OVERWORLD, new Int2ObjectOpenHashMap<>()); shapes.put(World.NETHER, new Int2ObjectOpenHashMap<>()); shapes.put(World.END, new Int2ObjectOpenHashMap<>()); ======= shapes.put(DimensionType.OVERWORLD, new Long2ObjectOpenHashMap<>()); shapes.put(DimensionType.THE_NETHER, new Long2ObjectOpenHashMap<>()); shapes.put(DimensionType.THE_END, new Long2ObjectOpenHashMap<>()); >>>>>>> shapes.put(World.OVERWORLD, new Long2ObjectOpenHashMap<>()); shapes.put(World.NETHER, new Long2ObjectOpenHashMap<>()); shapes.put(World.END, new Long2ObjectOpenHashMap<>()); <<<<<<< RegistryKey<World> dim = RegistryKey.of(Registry.DIMENSION, new Identifier(tag.getString("dim"))); int key = rshape.key(); ======= DimensionType dim = shape.shapeDimension; long key = rshape.key(); >>>>>>> RegistryKey<World> dim = shape.shapeDimension; long key = rshape.key(); <<<<<<< if (client.world.getRegistryKey() != dim) return false; ======= >>>>>>> if (client.world.getRegistryKey() != dim) return false;
<<<<<<< private long minTimeToReconnectToSameDevice = DEFAULT_SCAN_INTERVAL; private Map<String, Long> deviceLastConnected = new HashMap<>(); ======= >>>>>>> <<<<<<< public void setMinTimeToReconnectToSameDevice(long minTimeToReconnectToSameDevice) { this.minTimeToReconnectToSameDevice = minTimeToReconnectToSameDevice; } public BluetoothState start() { ======= public void start() { >>>>>>> public BluetoothState start() { <<<<<<< return BluetoothState.ENABLED; } ======= >>>>>>> return BluetoothState.ENABLED; }
<<<<<<< module.checkContacts(key.getBytes(), new DayDate().subtractDays(NUMBER_OF_DAYS_TO_TEST), new DayDate(), (date -> { ArrayList<Contact> contacts = new ArrayList<>(); for (int x = 0; x < NUMBER_OF_CONTACTS_PER_DAY; x++) { contacts.add(new Contact(0, new DayDate(), new EphId(new byte[CryptoModule.EPHID_LENGTH]), 0)); } return contacts; }), (contact -> {})); ======= module.checkContacts(key.getBytes(), new DayDate().subtractDays(NUMBER_OF_DAYS_TO_TEST).getStartOfDayTimestamp(), System.currentTimeMillis(), (timeFrom, timeUntil) -> { ArrayList<Contact> contacts = new ArrayList<>(); for (int x = 0; x < NUMBER_OF_CONTACTS_PER_DAY; x++) { contacts.add(new Contact(0, new DayDate().getStartOfDayTimestamp(), new EphId(new byte[CryptoModule.KEY_LENGTH]), 0)); } return contacts; }, contact -> {}); >>>>>>> module.checkContacts(key.getBytes(), new DayDate().subtractDays(NUMBER_OF_DAYS_TO_TEST).getStartOfDayTimestamp(), System.currentTimeMillis(), (timeFrom, timeUntil) -> { ArrayList<Contact> contacts = new ArrayList<>(); for (int x = 0; x < NUMBER_OF_CONTACTS_PER_DAY; x++) { contacts.add(new Contact(0, new DayDate().getStartOfDayTimestamp(), new EphId(new byte[CryptoModule.EPHID_LENGTH]), 0)); } return contacts; }, contact -> {});
<<<<<<< public Value validate(Map<String, Value> options, CarpetContext cc, Value value) { return value; } @Override public boolean appliesTo(ExpiringShape shape) { return true; } @Override public String identify() { return "dim"; } ======= public Value validate(Map<String, Value> options, CarpetContext cc, Value value) { String dimStr = value.getString(); Optional<DimensionType> dimOp = Registry.DIMENSION_TYPE.getOrEmpty(new Identifier(dimStr)); if (!dimOp.isPresent()) throw new InternalExpressionException("Unknown dimension "+dimStr); return value; } >>>>>>> public Value validate(Map<String, Value> options, CarpetContext cc, Value value) { return value; }
<<<<<<< public BluetoothState start() { BluetoothManager mManager = (BluetoothManager) context.getSystemService(Context.BLUETOOTH_SERVICE); if (mManager == null || !context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_BLUETOOTH_LE)) { return BluetoothState.NOT_SUPPORTED; } mAdapter = mManager.getAdapter(); if (mAdapter.isEnabled()) { mGattServer = mManager.openGattServer(context, createGattServerCallback()); setupService(); return BluetoothState.ENABLED; } else { return BluetoothState.DISABLED; } } private BluetoothGattServerCallback createGattServerCallback() { return new BluetoothGattServerCallback() { private static final String TAG = "GattServer"; @Override public void onConnectionStateChange(BluetoothDevice device, int status, int newState) { Logger.d(TAG, "Our gatt server connection state changed, new state " + newState); } @Override public void onServiceAdded(int status, BluetoothGattService service) { Logger.d(TAG, "Our gatt server service was added."); } @Override public void onCharacteristicReadRequest(BluetoothDevice device, int requestId, int offset, BluetoothGattCharacteristic characteristic) { Logger.i(TAG, "Our gatt characteristic was read."); mGattServer.sendResponse(device, requestId, BluetoothGatt.GATT_SUCCESS, offset, characteristic.getValue()); } @Override public void onCharacteristicWriteRequest(BluetoothDevice device, int requestId, BluetoothGattCharacteristic characteristic, boolean preparedWrite, boolean responseNeeded, int offset, byte[] value) { Logger.i(TAG, "We have received a write request for one of our hosted characteristics"); } @Override public void onNotificationSent(BluetoothDevice device, int status) { Logger.d(TAG, "onNotificationSent"); } @Override public void onDescriptorReadRequest(BluetoothDevice device, int requestId, int offset, BluetoothGattDescriptor descriptor) { Logger.d(TAG, "Gatt server descriptor was read."); } @Override public void onDescriptorWriteRequest(BluetoothDevice device, int requestId, BluetoothGattDescriptor descriptor, boolean preparedWrite, boolean responseNeeded, int offset, byte[] value) { Logger.d(TAG, "Gatt server descriptor was written."); } @Override public void onExecuteWrite(BluetoothDevice device, int requestId, boolean execute) { Logger.d(TAG, "Gatt server on execute write."); } }; } ======= >>>>>>> <<<<<<< mLeAdvertiser.startAdvertising(settingBuilder.build(), advBuilder.build(), scanResponse, advertiseCallback); return BluetoothState.ENABLED; ======= mLeAdvertiser.startAdvertising(settingBuilder.build(), advBuilder.build(), advertiseCallback); >>>>>>> mLeAdvertiser.startAdvertising(settingBuilder.build(), advBuilder.build(), advertiseCallback); return BluetoothState.ENABLED; <<<<<<< if (mGattServer != null) { mGattServer.close(); mGattServer = null; } ======= >>>>>>>
<<<<<<< import io.netflix.titus.api.jobmanager.model.job.ServiceJobProcesses; ======= import io.netflix.titus.api.jobmanager.model.job.migration.MigrationPolicy; >>>>>>> import io.netflix.titus.api.jobmanager.model.job.ServiceJobProcesses; import io.netflix.titus.api.jobmanager.model.job.migration.MigrationPolicy; <<<<<<< @Valid private final ServiceJobProcesses serviceJobProcesses; ======= @Valid private final MigrationPolicy migrationPolicy; >>>>>>> @Valid private final ServiceJobProcesses serviceJobProcesses; @Valid private final MigrationPolicy migrationPolicy; <<<<<<< public ServiceJobProcesses getServiceJobProcesses() { return serviceJobProcesses; } ======= public MigrationPolicy getMigrationPolicy() { return migrationPolicy; } >>>>>>> public ServiceJobProcesses getServiceJobProcesses() { return serviceJobProcesses; } public MigrationPolicy getMigrationPolicy() { return migrationPolicy; } <<<<<<< if (enabled != that.enabled) return false; if (!capacity.equals(that.capacity)) return false; if (!retryPolicy.equals(that.retryPolicy)) return false; return serviceJobProcesses.equals(that.serviceJobProcesses); ======= if (enabled != that.enabled) { return false; } if (capacity != null ? !capacity.equals(that.capacity) : that.capacity != null) { return false; } if (retryPolicy != null ? !retryPolicy.equals(that.retryPolicy) : that.retryPolicy != null) { return false; } return migrationPolicy != null ? migrationPolicy.equals(that.migrationPolicy) : that.migrationPolicy == null; >>>>>>> if (enabled != that.enabled) { return false; } if (capacity != null ? !capacity.equals(that.capacity) : that.capacity != null) { return false; } if (retryPolicy != null ? !retryPolicy.equals(that.retryPolicy) : that.retryPolicy != null) { return false; } if (serviceJobProcesses != null ? !serviceJobProcesses.equals(that.serviceJobProcesses) : that.serviceJobProcesses != null) { return false; } return migrationPolicy != null ? migrationPolicy.equals(that.migrationPolicy) : that.migrationPolicy == null; <<<<<<< result = 31 * result + retryPolicy.hashCode(); result = 31 * result + serviceJobProcesses.hashCode(); ======= result = 31 * result + (retryPolicy != null ? retryPolicy.hashCode() : 0); result = 31 * result + (migrationPolicy != null ? migrationPolicy.hashCode() : 0); >>>>>>> result = 31 * result + (retryPolicy != null ? retryPolicy.hashCode() : 0); result = 31 * result + (serviceJobProcesses != null ? serviceJobProcesses.hashCode() : 0); result = 31 * result + (migrationPolicy != null ?
migrationPolicy.hashCode() : 0); <<<<<<< ", serviceJobProcesses=" + serviceJobProcesses + ======= ", migrationPolicy=" + migrationPolicy + >>>>>>> ", serviceJobProcesses=" + serviceJobProcesses + ", migrationPolicy=" + migrationPolicy + <<<<<<< private ServiceJobProcesses serviceJobProcesses; ======= private MigrationPolicy migrationPolicy; >>>>>>> private ServiceJobProcesses serviceJobProcesses; private MigrationPolicy migrationPolicy; <<<<<<< public Builder withServiceJobProcesses(ServiceJobProcesses serviceJobProcesses) { this.serviceJobProcesses = serviceJobProcesses; return this; } ======= public Builder withMigrationPolicy(MigrationPolicy migrationPolicy) { this.migrationPolicy = migrationPolicy; return this; } >>>>>>> public Builder withServiceJobProcesses(ServiceJobProcesses serviceJobProcesses) { this.serviceJobProcesses = serviceJobProcesses; return this; } public Builder withMigrationPolicy(MigrationPolicy migrationPolicy) { this.migrationPolicy = migrationPolicy; return this; } <<<<<<< return newBuilder().withCapacity(capacity).withEnabled(enabled).withRetryPolicy(retryPolicy).withServiceJobProcesses(serviceJobProcesses); ======= return newBuilder().withCapacity(capacity).withEnabled(enabled).withRetryPolicy(retryPolicy).withMigrationPolicy(migrationPolicy); >>>>>>> return newBuilder().withCapacity(capacity).withEnabled(enabled).withRetryPolicy(retryPolicy) .withServiceJobProcesses(serviceJobProcesses).withMigrationPolicy(migrationPolicy); <<<<<<< ServiceJobExt serviceJobExt = new ServiceJobExt(capacity, enabled, retryPolicy, serviceJobProcesses); return serviceJobExt; ======= return new ServiceJobExt(capacity, enabled, retryPolicy, migrationPolicy); >>>>>>> return new ServiceJobExt(capacity, enabled, retryPolicy, serviceJobProcesses, migrationPolicy);
<<<<<<< @DefaultValue("10000") long getClientRequestTimeoutMs(); ======= @DefaultValue("cell1=(app1.*|app2.*);cell2=(.*)") String getRoutingRules(); >>>>>>> @DefaultValue("10000") long getClientRequestTimeoutMs(); @DefaultValue("cell1=(app1.*|app2.*);cell2=(.*)") String getRoutingRules();
<<<<<<< ======= import io.netflix.titus.master.job.service.ServiceJobMgr; import io.netflix.titus.master.jobmanager.SampleTitusModelUpdateActions; >>>>>>> import io.netflix.titus.master.job.service.ServiceJobMgr;
<<<<<<< private static File histFile = new File(PlaySet.PODCASTS.getRoot(), "history.prop"); ======= private static File siteList = new File(Config.CarCastRoot, "podcasts.txt"); >>>>>>> private static File histFile = new File(PlaySet.PODCASTS.getRoot(), "history.prop"); <<<<<<< public static void eraseHistory() { histFile.delete(); } public static List<String> getHistory() { List<String> history = new ArrayList<String>(); try { DataInputStream dis = new DataInputStream(new FileInputStream(histFile)); String line = null; while ((line = dis.readLine()) != null) { history.add(line); } } catch (Exception e) { Log.e(DownloadHelper.class.getName(), e.toString()); } return history; } ======= >>>>>>>
<<<<<<< debug.append("[necronnotifications][").append(ToggleCommand.necronNotificationsToggled).append("]\n"); debug.append("[bonzotimer][").append(ToggleCommand.bonzoTimerToggled).append("]\n"); debug.append("[blockbreakingfarms][").append(ToggleCommand.blockBreakingFarmsToggled).append("]\n"); ======= debug.append("[autoskilltracker][").append(ToggleCommand.autoSkillTrackerToggled).append("]\n"); >>>>>>> debug.append("[necronnotifications][").append(ToggleCommand.necronNotificationsToggled).append("]\n"); debug.append("[bonzotimer][").append(ToggleCommand.bonzoTimerToggled).append("]\n"); debug.append("[blockbreakingfarms][").append(ToggleCommand.blockBreakingFarmsToggled).append("]\n"); debug.append("[autoskilltracker][").append(ToggleCommand.autoSkillTrackerToggled).append("]\n");
<<<<<<< if (!hasKey("toggles", "NecronNotifications")) writeBooleanConfig("toggles", "NecronNotifications", false); if (!hasKey("toggles", "BonzoTimer")) writeBooleanConfig("toggles", "BonzoTimer", false); if (!hasKey("toggles", "BlockBreakingFarms")) writeBooleanConfig("toggles", "BlockBreakingFarms", false); ======= if (!hasKey("toggles", "AutoSkillTracker")) writeBooleanConfig("toggles", "AutoSkillTracker", false); >>>>>>> if (!hasKey("toggles", "NecronNotifications")) writeBooleanConfig("toggles", "NecronNotifications", false); if (!hasKey("toggles", "BonzoTimer")) writeBooleanConfig("toggles", "BonzoTimer", false); if (!hasKey("toggles", "BlockBreakingFarms")) writeBooleanConfig("toggles", "BlockBreakingFarms", false); if (!hasKey("toggles", "AutoSkillTracker")) writeBooleanConfig("toggles", "AutoSkillTracker", false); <<<<<<< ======= ToggleCommand.autoSkillTrackerToggled = getBoolean("toggles", "AutoSkillTracker"); >>>>>>> ToggleCommand.autoSkillTrackerToggled = getBoolean("toggles", "AutoSkillTracker");
<<<<<<< public static boolean necronNotificationsToggled; public static boolean bonzoTimerToggled; public static boolean blockBreakingFarmsToggled; ======= public static boolean autoSkillTrackerToggled; >>>>>>> public static boolean necronNotificationsToggled; public static boolean bonzoTimerToggled; public static boolean blockBreakingFarmsToggled; public static boolean autoSkillTrackerToggled; <<<<<<< case "necronnotifications": necronNotificationsToggled = !necronNotificationsToggled; ConfigHandler.writeBooleanConfig("toggles", "NecronNotifications", necronNotificationsToggled); player.addChatMessage(new ChatComponentText(DankersSkyblockMod.MAIN_COLOUR + "Necron phase notifications has been set to " + DankersSkyblockMod.SECONDARY_COLOUR + necronNotificationsToggled + DankersSkyblockMod.MAIN_COLOUR + ".")); break; case "bonzotimer": bonzoTimerToggled = !bonzoTimerToggled; ConfigHandler.writeBooleanConfig("toggles", "BonzoTimer", bonzoTimerToggled); player.addChatMessage(new ChatComponentText(DankersSkyblockMod.MAIN_COLOUR + "Bonzo's Mask timer has been set to " + DankersSkyblockMod.SECONDARY_COLOUR + necronNotificationsToggled + DankersSkyblockMod.MAIN_COLOUR + ".")); break; case "blockbreakingfarms": blockBreakingFarmsToggled = !blockBreakingFarmsToggled; ConfigHandler.writeBooleanConfig("toggles", "BlockBreakingFarms", blockBreakingFarmsToggled); player.addChatMessage(new ChatComponentText(DankersSkyblockMod.MAIN_COLOUR + "Prevent breaking farms has been set to " + DankersSkyblockMod.SECONDARY_COLOUR + necronNotificationsToggled + DankersSkyblockMod.MAIN_COLOUR + ".")); break; ======= case "autoskilltracker": autoSkillTrackerToggled = !autoSkillTrackerToggled; ConfigHandler.writeBooleanConfig("toggles", "AutoSkillTracker", autoSkillTrackerToggled); player.addChatMessage(new ChatComponentText(DankersSkyblockMod.MAIN_COLOUR + "Auto start/stop skill tracker has been set to " + DankersSkyblockMod.SECONDARY_COLOUR + autoSkillTrackerToggled + DankersSkyblockMod.MAIN_COLOUR + ".")); break; >>>>>>> case "necronnotifications": necronNotificationsToggled = !necronNotificationsToggled; ConfigHandler.writeBooleanConfig("toggles", "NecronNotifications", necronNotificationsToggled); player.addChatMessage(new ChatComponentText(DankersSkyblockMod.MAIN_COLOUR + "Necron phase notifications has been set to " + DankersSkyblockMod.SECONDARY_COLOUR + necronNotificationsToggled + DankersSkyblockMod.MAIN_COLOUR + ".")); break; case "bonzotimer": bonzoTimerToggled = !bonzoTimerToggled; ConfigHandler.writeBooleanConfig("toggles", "BonzoTimer", bonzoTimerToggled); player.addChatMessage(new ChatComponentText(DankersSkyblockMod.MAIN_COLOUR + "Bonzo's Mask timer has been set to " + DankersSkyblockMod.SECONDARY_COLOUR + necronNotificationsToggled + DankersSkyblockMod.MAIN_COLOUR + ".")); break; case "blockbreakingfarms": blockBreakingFarmsToggled = !blockBreakingFarmsToggled; ConfigHandler.writeBooleanConfig("toggles", "BlockBreakingFarms", blockBreakingFarmsToggled); player.addChatMessage(new ChatComponentText(DankersSkyblockMod.MAIN_COLOUR + "Prevent breaking farms has been set to " + DankersSkyblockMod.SECONDARY_COLOUR + necronNotificationsToggled + DankersSkyblockMod.MAIN_COLOUR + ".")); break; case "autoskilltracker": autoSkillTrackerToggled = !autoSkillTrackerToggled; ConfigHandler.writeBooleanConfig("toggles", "AutoSkillTracker", autoSkillTrackerToggled); player.addChatMessage(new ChatComponentText(DankersSkyblockMod.MAIN_COLOUR + "Auto start/stop skill tracker has been set to " + DankersSkyblockMod.SECONDARY_COLOUR
+ autoSkillTrackerToggled + DankersSkyblockMod.MAIN_COLOUR + ".")); break;
<<<<<<< private GuiButton necronNotifications; private GuiButton bonzoTimer; private GuiButton blockBreakingFarms; ======= private GuiButton autoSkillTracker; >>>>>>> private GuiButton necronNotifications; private GuiButton bonzoTimer; private GuiButton blockBreakingFarms; private GuiButton autoSkillTracker; <<<<<<< flowerWeapons = new GuiButton(0, width / 2 - 100, (int) (height * 0.5), "Prevent Placing FoT/Spirit Sceptre: " + Utils.getColouredBoolean(ToggleCommand.flowerWeaponsToggled)); necronNotifications = new GuiButton(0, width / 2 - 100, (int) (height * 0.6), "Necron Phase Notifications: " + Utils.getColouredBoolean(ToggleCommand.necronNotificationsToggled)); bonzoTimer = new GuiButton(0, width / 2 - 100, (int) (height * 0.7), "Bonzo's Mask Timer: " + Utils.getColouredBoolean(ToggleCommand.bonzoTimerToggled)); //Page 6 blockBreakingFarms = new GuiButton(0, width / 2 - 100, (int) (height * 0.1), "Prevent Breaking Farms: " + Utils.getColouredBoolean(ToggleCommand.blockBreakingFarmsToggled)); ======= flowerWeapons = new GuiButton(0, width / 2 - 100, (int) (height * 0.5), "Prevent Placing FoT/Spirit Sceptre: " + Utils.getColouredBoolean(ToggleCommand.flowerWeaponsToggled)); autoSkillTracker = new GuiButton(0, width / 2 - 100, (int) (height * 0.6), "Auto Start/Stop Skill Tracker: " + Utils.getColouredBoolean(ToggleCommand.autoSkillTrackerToggled)); >>>>>>> flowerWeapons = new GuiButton(0, width / 2 - 100, (int) (height * 0.5), "Prevent Placing FoT/Spirit Sceptre: " + Utils.getColouredBoolean(ToggleCommand.flowerWeaponsToggled)); necronNotifications = new GuiButton(0, width / 2 - 100, (int) (height * 0.6), "Necron Phase Notifications: " + Utils.getColouredBoolean(ToggleCommand.necronNotificationsToggled)); bonzoTimer = new GuiButton(0, width / 2 - 100, (int) (height * 0.7), "Bonzo's Mask Timer: " + Utils.getColouredBoolean(ToggleCommand.bonzoTimerToggled)); // Page 6 blockBreakingFarms = new GuiButton(0, width / 2 - 100, (int) (height * 0.1), "Prevent Breaking Farms: " + Utils.getColouredBoolean(ToggleCommand.blockBreakingFarmsToggled)); autoSkillTracker = new GuiButton(0, width / 2 - 100, (int) (height * 0.2), "Auto Start/Stop Skill Tracker: " + Utils.getColouredBoolean(ToggleCommand.autoSkillTrackerToggled)); <<<<<<< this.buttonList.add(flowerWeapons); this.buttonList.add(necronNotifications); this.buttonList.add(bonzoTimer); this.buttonList.add(nextPage); this.buttonList.add(backPage); break; case 6: this.buttonList.add(blockBreakingFarms); ======= this.buttonList.add(flowerWeapons); this.buttonList.add(autoSkillTracker); >>>>>>> this.buttonList.add(flowerWeapons); this.buttonList.add(necronNotifications); this.buttonList.add(bonzoTimer); this.buttonList.add(nextPage); this.buttonList.add(backPage); break; case 6: this.buttonList.add(blockBreakingFarms); this.buttonList.add(autoSkillTracker);
<<<<<<< import java.util.concurrent.ConcurrentHashMap; ======= import java.util.HashMap; import java.util.concurrent.TimeUnit; >>>>>>> import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit;
<<<<<<< CallbackInfoReturnable<Boolean> cir, BlockPos blockPos_2, PistonHandler pistonHandler_1, Map map_1, List<BlockPos> list_1, List<BlockState> list_2, List list_3, int int_2, BlockState blockStates_1[], Direction direction_2) ======= CallbackInfoReturnable<Boolean> cir, BlockPos blockPos_2, PistonHandler pistonHandler_1, List<BlockPos> list_1, List<BlockState> list_2, List list_3, int int_2, BlockState[] blockStates_1, Direction direction_2, Set set_1) >>>>>>> CallbackInfoReturnable<Boolean> cir, BlockPos blockPos_2, PistonHandler pistonHandler_1, Map map_1, List<BlockPos> list_1, List<BlockState> list_2, List list_3, int int_2, BlockState[] blockStates_1, Direction direction_2) <<<<<<< CallbackInfoReturnable<Boolean> cir, BlockPos blockPos_2, PistonHandler pistonHandler_1, Map map_1, List list_1, List list_2, List list_3, int int_2, BlockState blockStates_1[], Direction direction_2, int int_3, BlockPos blockPos_4, BlockState blockState_1) ======= CallbackInfoReturnable<Boolean> cir, BlockPos blockPos_2, PistonHandler pistonHandler_1, List list_1, List list_2, List list_3, int int_2, BlockState[] blockStates_1, Direction direction_2, Set set_1, int int_3, BlockPos blockPos_4, BlockState blockState_1) >>>>>>> CallbackInfoReturnable<Boolean> cir, BlockPos blockPos_2, PistonHandler pistonHandler_1, Map map_1, List list_1, List list_2, List list_3, int int_2, BlockState[] blockStates_1, Direction direction_2, int int_3, BlockPos blockPos_4, BlockState blockState_1)
<<<<<<< import org.apache.mina.common.*; import org.apache.mina.filter.ReadThrottleFilterBuilder; import org.apache.mina.filter.SSLFilter; ======= import org.apache.mina.core.buffer.IoBuffer; import org.apache.mina.core.service.IoService; import org.apache.mina.core.service.IoServiceListener; import org.apache.mina.core.session.IoSession; import org.apache.mina.core.buffer.SimpleBufferAllocator; import org.apache.mina.core.session.IdleStatus; >>>>>>> import org.apache.mina.core.buffer.IoBuffer; import org.apache.mina.core.buffer.SimpleBufferAllocator; import org.apache.mina.core.service.IoService; import org.apache.mina.core.service.IoServiceListener; import org.apache.mina.core.session.IdleStatus; import org.apache.mina.core.session.IoSession; <<<<<<< private static final Logger Log = LoggerFactory.getLogger(ConnectionManagerImpl.class); private static final int MB = 1024 * 1024; ======= public static final String EXECUTOR_FILTER_NAME = "threadPool"; public static final String TLS_FILTER_NAME = "tls"; public static final String COMPRESSION_FILTER_NAME = "compression"; public static final String XMPP_CODEC_FILTER_NAME = "xmpp"; public static final String CAPACITY_FILTER_NAME = "outCap"; >>>>>>> private static final int MB = 1024 * 1024; public static final String EXECUTOR_FILTER_NAME = "threadPool"; public static final String TLS_FILTER_NAME = "tls"; public static final String COMPRESSION_FILTER_NAME = "compression"; public static final String XMPP_CODEC_FILTER_NAME = "xmpp"; public static final String CAPACITY_FILTER_NAME = "outCap"; <<<<<<< ThreadFactory threadFactory = eventExecutor.getThreadFactory(); threadFactory = new DelegatingThreadFactory("Old SSL executor thread - ", threadFactory); eventExecutor.setThreadFactory(threadFactory); eventExecutor.setCorePoolSize(eventThreads + 1); eventExecutor.setMaximumPoolSize(eventThreads + 1); eventExecutor.setKeepAliveTime(60, TimeUnit.SECONDS); sslSocketAcceptor.getDefaultConfig().setThreadModel(ThreadModel.MANUAL); ======= final ThreadFactory originalThreadFactory = eventExecutor.getThreadFactory(); ThreadFactory newThreadFactory = new ThreadFactory() { private final AtomicInteger threadId = new AtomicInteger( 0 ); public Thread newThread( Runnable runnable ) { Thread t = originalThreadFactory.newThread( runnable ); t.setName("Old SSL executor thread - " + threadId.incrementAndGet() ); t.setDaemon( true ); return t; } }; eventExecutor.setThreadFactory( newThreadFactory ); // Create SocketAcceptor with correct number of processors sslSocketAcceptor = buildSocketAcceptor(CLIENT_SSL_SOCKET_ACCEPTOR_NAME); sslSocketAcceptor.getFilterChain().addFirst(EXECUTOR_FILTER_NAME, executorFilter); >>>>>>> final ThreadFactory originalThreadFactory = eventExecutor.getThreadFactory(); ThreadFactory newThreadFactory = new ThreadFactory() { private final AtomicInteger threadId = new AtomicInteger( 0 ); public Thread newThread( Runnable runnable ) { Thread t = originalThreadFactory.newThread( runnable ); t.setName("Old SSL executor thread - " + threadId.incrementAndGet() ); t.setDaemon( true ); return t; } }; eventExecutor.setThreadFactory( newThreadFactory ); // Create SocketAcceptor with correct number of processors sslSocketAcceptor = buildSocketAcceptor(CLIENT_SSL_SOCKET_ACCEPTOR_NAME); sslSocketAcceptor.getFilterChain().addFirst(EXECUTOR_FILTER_NAME, executorFilter); <<<<<<< sslSocketAcceptor.getFilterChain().addAfter("xmpp", "outCap", new StalledSessionsFilter()); // Throttle sessions who send data too fast int maxBufferSize = 
JiveGlobals.getIntProperty("xmpp.client_ssl.maxReadBufferSize", 10 * MB); installReadThrottle(sslSocketAcceptor, maxBufferSize); ======= sslSocketAcceptor.getFilterChain().addAfter(XMPP_CODEC_FILTER_NAME, CAPACITY_FILTER_NAME, new StalledSessionsFilter()); >>>>>>> sslSocketAcceptor.getFilterChain().addAfter(XMPP_CODEC_FILTER_NAME, CAPACITY_FILTER_NAME, new StalledSessionsFilter()); // Throttle sessions who send data too fast int maxBufferSize = JiveGlobals.getIntProperty(ConnectionSettings.Client.MAX_READ_BUFFER_SSL, 10 * MB); sslSocketAcceptor.getSessionConfig().setMaxReadBufferSize(maxBufferSize); Log.debug("Throttling read buffer for connections from socketAcceptor={} to max={} bytes", socketAcceptor, maxBufferSize); <<<<<<< SSLFilter sslFilter = new SSLFilter(sslContext); if (JiveGlobals.getProperty(ConnectionSettings.Client.AUTH_PER_CLIENTCERT_POLICY,"disabled").equals("needed")) { ======= SslFilter sslFilter = new SslFilter(sslContext); if (JiveGlobals.getProperty("xmpp.client.cert.policy","disabled").equals("needed")) { >>>>>>> SslFilter sslFilter = new SslFilter(sslContext); if (JiveGlobals.getProperty(ConnectionSettings.Client.AUTH_PER_CLIENTCERT_POLICY,"disabled").equals("needed")) {
<<<<<<< ParserOptions.Builder optionsBuilder = ParserOptions.builder(); optionsBuilder.xsdFilename(System.getProperty(SbeTool.VALIDATION_XSD)); optionsBuilder.stopOnError(Boolean.parseBoolean(System.getProperty(SbeTool.VALIDATION_STOP_ON_ERROR))); optionsBuilder.warningsFatal(Boolean.parseBoolean(System.getProperty(SbeTool.VALIDATION_WARNINGS_FATAL))); optionsBuilder.suppressOutput(Boolean.parseBoolean(System.getProperty(SbeTool.VALIDATION_SUPPRESS_OUTPUT))); ParserOptions options = optionsBuilder.build(); try (final BufferedInputStream in = new BufferedInputStream(new FileInputStream(messageSchemaFileName))) ======= try (final BufferedInputStream in = new BufferedInputStream(new FileInputStream(sbeSchemaFilename))) >>>>>>> ParserOptions.Builder optionsBuilder = ParserOptions.builder(); optionsBuilder.xsdFilename(System.getProperty(SbeTool.VALIDATION_XSD)); optionsBuilder.stopOnError(Boolean.parseBoolean(System.getProperty(SbeTool.VALIDATION_STOP_ON_ERROR))); optionsBuilder.warningsFatal(Boolean.parseBoolean(System.getProperty(SbeTool.VALIDATION_WARNINGS_FATAL))); optionsBuilder.suppressOutput(Boolean.parseBoolean(System.getProperty(SbeTool.VALIDATION_SUPPRESS_OUTPUT))); ParserOptions options = optionsBuilder.build(); try (final BufferedInputStream in = new BufferedInputStream(new FileInputStream(sbeSchemaFilename)))
<<<<<<< @Test public void testBulkInsertDoesYieldByDefault() { when(db.insert(anyString(), anyString(), (ContentValues) anyObject())).thenReturn(2L); int bulkSize = 100; ContentValues[] bulkToInsert = createContentValuesArray(bulkSize); bulkInsert("test.com/table1", bulkToInsert); verify(db, times(bulkSize)).yieldIfContendedSafely(); } @Test public void testWhenSpecifyingAllowYieldQueryParameterAsTrueThanBulkInsertDoesYield() { when(db.insert(anyString(), anyString(), (ContentValues) anyObject())).thenReturn(2L); int bulkSize = 100; ContentValues[] bulkToInsert = createContentValuesArray(bulkSize); bulkInsert("test.com/table1?allowYield=true", bulkToInsert); verify(db, times(bulkSize)).yieldIfContendedSafely(); } @Test public void testWhenSpecifyingAllowYieldQueryParameterAsFalseThanBulkInsertDoesNotYield() { when(db.insert(anyString(), anyString(), (ContentValues) anyObject())).thenReturn(2L); int bulkSize = 100; ContentValues[] bulkToInsert = createContentValuesArray(bulkSize); bulkInsert("test.com/table1?allowYield=false", bulkToInsert); verify(db, never()).yieldIfContendedSafely(); } ======= @Test public void testProvidedNotificationUriSetCorrectly() { query("test.com/view1"); verify(mockCursor).setNotificationUri((ContentResolver) anyObject(), eq(Uri.parse("content://test.com/table1"))); } >>>>>>> @Test public void testBulkInsertDoesYieldByDefault() { when(db.insert(anyString(), anyString(), (ContentValues) anyObject())).thenReturn(2L); int bulkSize = 100; ContentValues[] bulkToInsert = createContentValuesArray(bulkSize); bulkInsert("test.com/table1", bulkToInsert); verify(db, times(bulkSize)).yieldIfContendedSafely(); } @Test public void testWhenSpecifyingAllowYieldQueryParameterAsTrueThanBulkInsertDoesYield() { when(db.insert(anyString(), anyString(), (ContentValues) anyObject())).thenReturn(2L); int bulkSize = 100; ContentValues[] bulkToInsert = createContentValuesArray(bulkSize); bulkInsert("test.com/table1?allowYield=true", bulkToInsert); verify(db, times(bulkSize)).yieldIfContendedSafely(); } @Test public void testWhenSpecifyingAllowYieldQueryParameterAsFalseThanBulkInsertDoesNotYield() { when(db.insert(anyString(), anyString(), (ContentValues) anyObject())).thenReturn(2L); int bulkSize = 100; ContentValues[] bulkToInsert = createContentValuesArray(bulkSize); bulkInsert("test.com/table1?allowYield=false", bulkToInsert); verify(db, never()).yieldIfContendedSafely(); } @Test public void testProvidedNotificationUriSetCorrectly() { query("test.com/view1"); verify(mockCursor).setNotificationUri((ContentResolver) anyObject(), eq(Uri.parse("content://test.com/table1"))); }
<<<<<<< import javafx.css.PseudoClass; import javafx.geometry.Insets; import javafx.geometry.Rectangle2D; import javafx.geometry.Side; ======= import javafx.geometry.Pos; >>>>>>> import javafx.css.PseudoClass; import javafx.geometry.Insets; import javafx.geometry.Rectangle2D; import javafx.geometry.Side; import javafx.geometry.Pos; <<<<<<< syncTabListViewItems(); tabPane.getTabs().addListener((ListChangeListener<Tab>) change -> syncTabListViewItems()); tabListView.getSelectionModel().selectedIndexProperty().addListener((observableValue, oldValue, newValue) -> tabPane.getSelectionModel().select(newValue.intValue())); HBox contentPane = new HBox(); contentPane.getChildren().add(tabListView); contentPane.getChildren().add(tabPane); /* !!! FIXME: Begin of dirty code !!! */ HBox.setMargin(tabPane, new Insets(0, -13, 0, 6)); Rectangle clipRect = new Rectangle(); clipRect.widthProperty().bind(contentPane.widthProperty().subtract(13)); clipRect.heightProperty().bind(contentPane.heightProperty()); contentPane.setClip(clipRect); /* !!! End of dirty code !!! */ getDialogPane().setContent(contentPane); setOnHidden(event -> { ======= getDialogPane().setContent(tabPane); addOnCloseRequest(event -> { >>>>>>> syncTabListViewItems(); tabPane.getTabs().addListener((ListChangeListener<Tab>) change -> syncTabListViewItems()); tabListView.getSelectionModel().selectedIndexProperty().addListener((observableValue, oldValue, newValue) -> tabPane.getSelectionModel().select(newValue.intValue())); HBox contentPane = new HBox(); contentPane.getChildren().add(tabListView); contentPane.getChildren().add(tabPane); /* !!! FIXME: Begin of dirty code !!! */ HBox.setMargin(tabPane, new Insets(0, -13, 0, 6)); Rectangle clipRect = new Rectangle(); clipRect.widthProperty().bind(contentPane.widthProperty().subtract(13)); clipRect.heightProperty().bind(contentPane.heightProperty()); contentPane.setClip(clipRect); /* !!! End of dirty code !!! */ getDialogPane().setContent(contentPane); setOnHidden(event -> {
<<<<<<< mIcon.setId(generateNewViewId()); ======= mIcon.setId(1); >>>>>>> mIcon.setId(generateNewViewId()); <<<<<<< mPrimaryTextView.setId(generateNewViewId()); ======= mPrimaryTextView.setId(2); >>>>>>> mPrimaryTextView.setId(generateNewViewId()); <<<<<<< public final AtomicInteger sNextGeneratedId = new AtomicInteger(1); /** * Generate a value suitable for use in {@link #setId(int)}. * This value will not collide with ID values generated at build time by aapt for R.id. * * @return a generated ID value */ public int generateNewViewId() { for (;;) { final int result = sNextGeneratedId.get(); // aapt-generated IDs have the high byte nonzero; clamp to the range under that. int newValue = result + 1; if (newValue > 0x00FFFFFF) newValue = 1; // Roll over to 1, not 0. if (sNextGeneratedId.compareAndSet(result, newValue)) { return result; } } } ======= >>>>>>> public final AtomicInteger sNextGeneratedId = new AtomicInteger(1); /** * Generate a value suitable for use in {@link #setId(int)}. * This value will not collide with ID values generated at build time by aapt for R.id. * * @return a generated ID value */ public int generateNewViewId() { for (;;) { final int result = sNextGeneratedId.get(); // aapt-generated IDs have the high byte nonzero; clamp to the range under that. int newValue = result + 1; if (newValue > 0x00FFFFFF) newValue = 1; // Roll over to 1, not 0. if (sNextGeneratedId.compareAndSet(result, newValue)) { return result; } } }
<<<<<<< import hudson.Extension; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.BuildStepMonitor; import hudson.tasks.BuildTrigger; import hudson.tasks.BuildWrapper; import hudson.tasks.BuildWrapperDescriptor; import hudson.tasks.Builder; import hudson.tasks.Publisher; import hudson.triggers.Trigger; import org.apache.commons.io.FileUtils; ======= import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.Collection; import static org.junit.Assert.*; >>>>>>> import hudson.Extension; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.BuildStepMonitor; import hudson.tasks.BuildTrigger; import hudson.tasks.BuildWrapper; import hudson.tasks.BuildWrapperDescriptor; import hudson.tasks.Builder; import hudson.tasks.Publisher; import hudson.triggers.Trigger; import org.apache.commons.io.FileUtils; import java.io.ByteArrayInputStream; import java.io.IOException; import static org.junit.Assert.*; <<<<<<< import java.io.File; import java.util.Collection; import java.util.Iterator; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; ======= import static org.hamcrest.core.StringContains.containsString; >>>>>>> import java.io.File; import java.util.Collection; import java.util.List; import static org.hamcrest.core.StringContains.containsString; <<<<<<< /** * This test unit makes sure that if part of the config.xml file is * deleted it will still load everything else inside the folder. * The test unit expects an IOException is thrown, and the one failed * job fails to load. */ @Issue("JENKINS-22811") @Test public void xmlFileFailsToLoad() throws Exception { MockFolder folder = r.createFolder("folder"); assertNotNull(folder); AbstractProject project = folder.createProject(FreeStyleProject.class, "job1"); AbstractProject project2 = folder.createProject(FreeStyleProject.class, "job2"); AbstractProject project3 = folder.createProject(FreeStyleProject.class, "job3"); File configFile = project.getConfigFile().getFile(); List<String> lines = FileUtils.readLines(configFile).subList(0, 5); configFile.delete(); // Remove half of the config.xml file to make "invalid" or fail to load FileUtils.writeByteArrayToFile(configFile, lines.toString().getBytes()); for (int i = lines.size() / 2; i < lines.size(); i++) { FileUtils.writeStringToFile(configFile, lines.get(i), true); } // Reload Jenkins. r.jenkins.reload(); // Folder assertNotNull("Folder failed to load.", r.jenkins.getItemByFullName("folder")); assertNull("Job should have failed to load.", r.jenkins.getItemByFullName("folder/job1")); assertNotNull("Other job in folder should have loaded.", r.jenkins.getItemByFullName("folder/job2")); assertNotNull("Other job in folder should have loaded.", r.jenkins.getItemByFullName("folder/job3")); } /** * This test unit makes sure that jobs that contain bad get*Action methods will continue to * load the project. */ @LocalData @Issue("JENKINS-22811") @Test public void xmlFileReadExceptionOnLoad() throws Exception { MockFolder d = r.jenkins.getItemByFullName("d", MockFolder.class); assertNotNull(d); Collection<TopLevelItem> items = d.getItems(); assertEquals(5, items.size()); } @TestExtension public static class MockBuildWrapperThrowsError extends BuildWrapper { @Override public Collection<? 
extends Action> getProjectActions(AbstractProject project){ throw new NullPointerException(); } @Extension public static class DescriptorImpl extends BuildWrapperDescriptor { @Override public boolean isApplicable(AbstractProject<?, ?> item) { return true; } @Override public String getDisplayName() { return null; } } } @TestExtension public static class MockBuilderThrowsError extends Builder { @Override public Collection<? extends Action> getProjectActions(AbstractProject project){ throw new NullPointerException(); } @Extension public static final Descriptor DESCRIPTOR = new DescriptorImpl(); public static class DescriptorImpl extends BuildStepDescriptor { @Override public boolean isApplicable(Class jobType) { return false; } @Override public String getDisplayName() { return null; } } } @TestExtension public static class MockBuildTriggerThrowsError extends Trigger { @Override public Collection<? extends Action> getProjectActions() { throw new NullPointerException(); } @Extension public static final Descriptor DESCRIPTOR = new BuildTrigger.DescriptorImpl(); } @TestExtension public static class MockPublisherThrowsError extends Publisher { @Override public Collection<? extends Action> getProjectActions(AbstractProject project) { throw new NullPointerException(); } @Override public BuildStepMonitor getRequiredMonitorService() { return null; } @Extension public static final Descriptor DESCRIPTOR = new DescriptorImpl(); public static class DescriptorImpl extends BuildStepDescriptor { @Override public boolean isApplicable(Class jobType) { return false; } @Override public String getDisplayName() { return null; } } } ======= @Test public void createProjectFromXMLShouldNoCreateEntities() throws IOException { final String xml = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<!DOCTYPE project[\n" + " <!ENTITY foo SYSTEM \"file:///\">\n" + "]>\n" + "<project>\n" + " <actions/>\n" + " <description>&foo;</description>\n" + " <keepDependencies>false</keepDependencies>\n" + " <properties/>\n" + " <scm class=\"hudson.scm.NullSCM\"/>\n" + " <canRoam>true</canRoam>\n" + " <triggers/>\n" + " <builders/>\n" + " <publishers/>\n" + " <buildWrappers/>\n" + "</project>"; Item foo = r.jenkins.createProjectFromXML("foo", new ByteArrayInputStream(xml.getBytes())); // if no exception then JAXP is swallowing these - so there should be no entity in the description. assertThat(Items.getConfigFile(foo).asString(), containsString("<description/>")); } >>>>>>> /** * This test unit makes sure that if part of the config.xml file is * deleted it will still load everything else inside the folder. * The test unit expects an IOException is thrown, and the one failed * job fails to load. */ @Issue("JENKINS-22811") @Test public void xmlFileFailsToLoad() throws Exception { MockFolder folder = r.createFolder("folder"); assertNotNull(folder); AbstractProject project = folder.createProject(FreeStyleProject.class, "job1"); AbstractProject project2 = folder.createProject(FreeStyleProject.class, "job2"); AbstractProject project3 = folder.createProject(FreeStyleProject.class, "job3"); File configFile = project.getConfigFile().getFile(); List<String> lines = FileUtils.readLines(configFile).subList(0, 5); configFile.delete(); // Remove half of the config.xml file to make "invalid" or fail to load FileUtils.writeByteArrayToFile(configFile, lines.toString().getBytes()); for (int i = lines.size() / 2; i < lines.size(); i++) { FileUtils.writeStringToFile(configFile, lines.get(i), true); } // Reload Jenkins. 
r.jenkins.reload(); // Folder assertNotNull("Folder failed to load.", r.jenkins.getItemByFullName("folder")); assertNull("Job should have failed to load.", r.jenkins.getItemByFullName("folder/job1")); assertNotNull("Other job in folder should have loaded.", r.jenkins.getItemByFullName("folder/job2")); assertNotNull("Other job in folder should have loaded.", r.jenkins.getItemByFullName("folder/job3")); } /** * This test unit makes sure that jobs that contain bad get*Action methods will continue to * load the project. */ @LocalData @Issue("JENKINS-22811") @Test public void xmlFileReadExceptionOnLoad() throws Exception { MockFolder d = r.jenkins.getItemByFullName("d", MockFolder.class); assertNotNull(d); Collection<TopLevelItem> items = d.getItems(); assertEquals(5, items.size()); } @TestExtension public static class MockBuildWrapperThrowsError extends BuildWrapper { @Override public Collection<? extends Action> getProjectActions(AbstractProject project){ throw new NullPointerException(); } @Extension public static class DescriptorImpl extends BuildWrapperDescriptor { @Override public boolean isApplicable(AbstractProject<?, ?> item) { return true; } @Override public String getDisplayName() { return null; } } } @TestExtension public static class MockBuilderThrowsError extends Builder { @Override public Collection<? extends Action> getProjectActions(AbstractProject project){ throw new NullPointerException(); } @Extension public static final Descriptor DESCRIPTOR = new DescriptorImpl(); public static class DescriptorImpl extends BuildStepDescriptor { @Override public boolean isApplicable(Class jobType) { return false; } @Override public String getDisplayName() { return null; } } } @TestExtension public static class MockBuildTriggerThrowsError extends Trigger { @Override public Collection<? extends Action> getProjectActions() { throw new NullPointerException(); } @Extension public static final Descriptor DESCRIPTOR = new BuildTrigger.DescriptorImpl(); } @TestExtension public static class MockPublisherThrowsError extends Publisher { @Override public Collection<? extends Action> getProjectActions(AbstractProject project) { throw new NullPointerException(); } @Override public BuildStepMonitor getRequiredMonitorService() { return null; } @Extension public static final Descriptor DESCRIPTOR = new DescriptorImpl(); public static class DescriptorImpl extends BuildStepDescriptor { @Override public boolean isApplicable(Class jobType) { return false; } @Override public String getDisplayName() { return null; } } } @Test public void createProjectFromXMLShouldNoCreateEntities() throws IOException { final String xml = "<?xml version='1.0' encoding='UTF-8'?>\n" + "<!DOCTYPE project[\n" + " <!ENTITY foo SYSTEM \"file:///\">\n" + "]>\n" + "<project>\n" + " <actions/>\n" + " <description>&foo;</description>\n" + " <keepDependencies>false</keepDependencies>\n" + " <properties/>\n" + " <scm class=\"hudson.scm.NullSCM\"/>\n" + " <canRoam>true</canRoam>\n" + " <triggers/>\n" + " <builders/>\n" + " <publishers/>\n" + " <buildWrappers/>\n" + "</project>"; Item foo = r.jenkins.createProjectFromXML("foo", new ByteArrayInputStream(xml.getBytes())); // if no exception then JAXP is swallowing these - so there should be no entity in the description. assertThat(Items.getConfigFile(foo).asString(), containsString("<description/>")); }
<<<<<<<
assertThat(result.stderr(), containsString("user is missing the Agent/Configure permission"));
assertThat(result, failedWith(-1));
=======
assertThat(result.stderr(), containsString("ERROR: user is missing the Slave/Configure permission"));
assertThat(result, failedWith(6));
>>>>>>>
assertThat(result.stderr(), containsString("ERROR: user is missing the Agent/Configure permission"));
assertThat(result, failedWith(6));
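For reference, the matchers in the resolved assertions come from the jenkins-test-harness CLICommandInvoker. A hedged sketch of how such a test typically obtains result; the command name, user, and argument are illustrative placeholders, not taken from the original test:

import static hudson.cli.CLICommandInvoker.Matcher.failedWith;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;

import hudson.cli.CLICommandInvoker;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.JenkinsRule;

public class PermissionMessageTest {
    @Rule public JenkinsRule r = new JenkinsRule();

    @Test
    public void missingPermissionIsReported() {
        // "disconnect-node", "user" and "some-agent" are placeholder names.
        CLICommandInvoker.Result result = new CLICommandInvoker(r, "disconnect-node")
                .asUser("user")
                .invokeWithArgs("some-agent");
        assertThat(result.stderr(), containsString("ERROR: user is missing the Agent/Configure permission"));
        assertThat(result, failedWith(6)); // 6 is the CLI's "missing permission" exit code
    }
}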
<<<<<<<
} catch (XStreamException e) {
=======
} catch (MissingFieldException e) {
    if (critical) {
        throw e;
    }
    LOGGER.log(WARNING,"Skipping a non-existent field "+e.getFieldName(),e);
    addErrorInContext(context, e);
} catch (CannotResolveClassException e) {
    if (critical) {
        throw e;
    }
    LOGGER.log(WARNING,"Skipping a non-existent type",e);
>>>>>>>
} catch (XStreamException e) {
    if (critical) {
        throw e;
    }
<<<<<<<
=======
    if (critical) {
        throw e;
    }
    LOGGER.log(WARNING,"Failed to resolve a type",e);
>>>>>>>
    if (critical) {
        throw e;
    }
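Both branches of the record above converge on one idea: XStreamException is a common unchecked supertype of MissingFieldException and CannotResolveClassException, so a single catch block can cover both, and a critical flag decides between rethrowing and log-and-continue. A self-contained sketch of that pattern; everything here except XStreamException is a hypothetical name:

import com.thoughtworks.xstream.XStreamException;
import java.util.function.Supplier;
import java.util.logging.Level;
import java.util.logging.Logger;

class LenientLoader {
    private static final Logger LOGGER = Logger.getLogger(LenientLoader.class.getName());

    // critical = the failure is on the root object, so there is nothing sensible to salvage.
    static <T> T tryLoad(Supplier<T> step, boolean critical) {
        try {
            return step.get();
        } catch (XStreamException e) { // covers MissingFieldException, CannotResolveClassException, ...
            if (critical) {
                throw e; // XStreamException is unchecked, so no throws clause is needed
            }
            LOGGER.log(Level.WARNING, "Skipping an unloadable part of the stream", e);
            return null; // drop the broken piece and keep loading the rest
        }
    }
}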
<<<<<<<
import net.minecraft.util.registry.RegistryKey;
import net.minecraft.world.World;
=======
import net.minecraft.util.Identifier;
import net.minecraft.util.registry.Registry;
import net.minecraft.world.IWorld;
import net.minecraft.world.dimension.DimensionType;
>>>>>>>
import net.minecraft.util.Identifier;
import net.minecraft.util.registry.Registry;
import net.minecraft.util.registry.RegistryKey;
import net.minecraft.world.World;
<<<<<<<
public void addShape(String type, RegistryKey<World> dim, CompoundTag tag)
=======
public void addShape(CompoundTag tag)
>>>>>>>
public void addShape(CompoundTag tag)
<<<<<<<
Value ret = ListValue.wrap(store.getInCircle(condition, pos, (int)radius, status).map(p ->
=======
Value ret = ListValue.wrap(store.get(condition, pos, (int)radius, status).sorted(Comparator.comparingDouble(p -> p.getPos().getSquaredDistance(pos))).map(p ->
>>>>>>>
Value ret = ListValue.wrap(store.getInCircle(condition, pos, (int)radius, status).sorted(Comparator.comparingDouble(p -> p.getPos().getSquaredDistance(pos))).map(p ->
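The resolution above keeps both sides' changes: the renamed getInCircle lookup and the proximity ordering. Sorting by squared distance is the usual way to order by closeness without paying for a square root per element, since sqrt is monotonic; a self-contained illustration with a hypothetical Point type:

import java.util.Comparator;
import java.util.List;
import java.util.stream.Stream;

public class NearestFirst {
    record Point(double x, double y, double z) {
        double squaredDistance(Point o) {
            double dx = x - o.x, dy = y - o.y, dz = z - o.z;
            return dx * dx + dy * dy + dz * dz; // no Math.sqrt needed for ordering
        }
    }

    public static void main(String[] args) {
        Point origin = new Point(0, 0, 0);
        List<Point> nearestFirst = Stream.of(new Point(3, 0, 0), new Point(1, 1, 0), new Point(0, 0, 2))
                // squared distance preserves the ordering of true distance
                .sorted(Comparator.comparingDouble(p -> p.squaredDistance(origin)))
                .toList();
        System.out.println(nearestFirst);
    }
}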
<<<<<<<
public boolean plopAnywhere(ServerWorld world, BlockPos pos);
public boolean plopAnywhere(ServerWorld world, BlockPos pos, ChunkGenerator<? extends ChunkGeneratorConfig> generator);
=======
boolean plopAnywhere(IWorld world, BlockPos pos);
boolean plopAnywhere(IWorld world, BlockPos pos, ChunkGenerator<? extends ChunkGeneratorConfig> generator);
>>>>>>>
boolean plopAnywhere(ServerWorld world, BlockPos pos);
boolean plopAnywhere(ServerWorld world, BlockPos pos, ChunkGenerator<? extends ChunkGeneratorConfig> generator);
<<<<<<<
import hudson.remoting.Callable;
=======
import jenkins.model.Jenkins;
>>>>>>>
import hudson.remoting.Callable;
<<<<<<<
import java.io.Console;
import java.io.IOException;
import jenkins.model.Jenkins;
import jenkins.security.ImpersonatingUserDetailsService;
import jenkins.security.SecurityListener;
=======
import jenkins.security.MasterToSlaveCallable;
>>>>>>>
import java.io.Console;
import java.io.IOException;
import jenkins.model.Jenkins;
import jenkins.security.ImpersonatingUserDetailsService;
import jenkins.security.SecurityListener;
import jenkins.security.MasterToSlaveCallable;
<<<<<<<
private static class InteractivelyAskForPassword implements Callable<String,IOException> {
=======
private static class InteractivelyAskForPassword extends MasterToSlaveCallable<String,IOException> {
    @IgnoreJRERequirement
>>>>>>>
private static class InteractivelyAskForPassword extends MasterToSlaveCallable<String,IOException> {
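The last hunk above swaps a raw remoting Callable for MasterToSlaveCallable, the hardened base class that marks a callable as safe to send only in the controller-to-agent direction, never the reverse. A cut-down sketch of such a console-prompting callable, using only java.io.Console and the real jenkins.security.MasterToSlaveCallable; the class body here is illustrative, not the original:

import java.io.Console;
import java.io.IOException;
import jenkins.security.MasterToSlaveCallable;

// The body runs on the remote JVM that owns the terminal; the base class
// restricts which direction the object may be shipped over remoting.
class AskForPassword extends MasterToSlaveCallable<String, IOException> {
    private static final long serialVersionUID = 1L;

    @Override
    public String call() throws IOException {
        Console console = System.console();
        if (console == null) {
            return null; // no interactive console attached to this JVM
        }
        char[] answer = console.readPassword("Password:");
        return answer == null ? null : new String(answer);
    }
}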
<<<<<<<
player.world.setBlockBreakingInfo(-1, ap.currentBlock, -1);
player.interactionManager.processBlockBreakingAction(ap.currentBlock, PlayerActionC2SPacket.Action.ABORT_DESTROY_BLOCK, Direction.DOWN, player.server.getWorldHeight());
=======
player.world.setBlockBreakingProgress(-1, ap.currentBlock, -1);
player.interactionManager.method_14263(ap.currentBlock, PlayerActionC2SPacket.Action.ABORT_DESTROY_BLOCK, Direction.DOWN, player.server.getWorldHeight());
ap.currentBlock = null;
>>>>>>>
player.world.setBlockBreakingInfo(-1, ap.currentBlock, -1);
player.interactionManager.processBlockBreakingAction(ap.currentBlock, PlayerActionC2SPacket.Action.ABORT_DESTROY_BLOCK, Direction.DOWN, player.server.getWorldHeight());
ap.currentBlock = null;