conflict_resolution (string, lengths 27 to 16k)
<<<<<<< private final String path; private final SharedGroup sharedGroup; ======= private SharedGroup sharedGroup; >>>>>>> private final String path; private SharedGroup sharedGroup;
<<<<<<< import io.realm.internal.async.QueryUpdateTask; import io.realm.internal.log.RealmLog; ======= import io.realm.annotations.Required; >>>>>>> import io.realm.internal.async.QueryUpdateTask; import io.realm.internal.log.RealmLog; import io.realm.annotations.Required; <<<<<<< ======= private static final String LINK_NOT_SUPPORTED_METHOD = "'%s' is not supported for link queries"; private static final String TYPE_MISMATCH = "Field '%s': type mismatch - %s expected."; >>>>>>> private static final String LINK_NOT_SUPPORTED_METHOD = "'%s' is not supported for link queries"; private static final String TYPE_MISMATCH = "Field '%s': type mismatch - %s expected."; <<<<<<< columnIndices[names.length - 1] = table.getColumnIndex(names[names.length - 1]); if (fieldType != table.getColumnType(columnIndices[names.length - 1])) { throw new IllegalArgumentException(String.format("Field '%s': type mismatch.", names[names.length - 1])); ======= columnIndices[names.length - 1] = table.getColumnIndex(names[names.length - 1]); if (fieldType != null && fieldType != table.getColumnType(columnIndices[names.length - 1])) { throw new IllegalArgumentException(String.format("Field '%s': type mismatch.", names[names.length - 1])); >>>>>>> columnIndices[names.length - 1] = table.getColumnIndex(names[names.length - 1]); if (fieldType != null && fieldType != table.getColumnType(columnIndices[names.length - 1])) { throw new IllegalArgumentException(String.format("Field '%s': type mismatch.", names[names.length - 1])); <<<<<<< * Equal-to comparison * * @param fieldName The field to compare * @param value The value to compare with * @return The query object * @throws java.lang.IllegalArgumentException One or more arguments do not match class or * field type * @throws java.lang.RuntimeException Any other error ======= * Equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. >>>>>>> * Equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. <<<<<<< * Equal-to comparison * * @param fieldName The field to compare * @param value The value to compare with * @return The query object * @throws java.lang.IllegalArgumentException One or more arguments do not match class or * field type * @throws java.lang.RuntimeException Any other error ======= * Equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. >>>>>>> * Equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. 
<<<<<<< * Equal-to comparison * * @param fieldName The field to compare * @param value The value to compare with * @return The query object * @throws java.lang.IllegalArgumentException One or more arguments do not match class or * field type * @throws java.lang.RuntimeException Any other error ======= * Equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. >>>>>>> * Equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. <<<<<<< * Equal-to comparison * * @param fieldName The field to compare * @param value The value to compare with * @return The query object * @throws java.lang.IllegalArgumentException One or more arguments do not match class or * field type * @throws java.lang.RuntimeException Any other error ======= * Equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. >>>>>>> * Equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. <<<<<<< * Not-equal-to comparison * * @param fieldName The field to compare * @param value The value to compare with * @return The query object * @throws java.lang.IllegalArgumentException One or more arguments do not match class or * field type * @throws java.lang.RuntimeException Any other error ======= * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. >>>>>>> * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. <<<<<<< * Not-equal-to comparison * * @param fieldName The field to compare * @param value The value to compare with * @return The query object * @throws java.lang.IllegalArgumentException One or more arguments do not match class or * field type * @throws java.lang.RuntimeException Any other error ======= * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. >>>>>>> * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. 
* @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. <<<<<<< * Not-equal-to comparison * * @param fieldName The field to compare * @param value The value to compare with * @return The query object * @throws java.lang.IllegalArgumentException One or more arguments do not match class or * field type * @throws java.lang.RuntimeException Any other error ======= * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. >>>>>>> * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. <<<<<<< * Not-equal-to comparison * * @param fieldName The field to compare * @param value The value to compare with * @return The query object * @throws java.lang.IllegalArgumentException One or more arguments do not match class or * field type * @throws java.lang.RuntimeException Any other error ======= * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. >>>>>>> * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object. * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. <<<<<<< * Not-equal-to comparison * * @param fieldName The field to compare * @param value The value to compare with * @return The query object * @throws java.lang.IllegalArgumentException One or more arguments do not match class or * field type * @throws java.lang.RuntimeException Any other error ======= * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. >>>>>>> * Not-equal-to comparison. * @param fieldName the field to compare. * @param value the value to compare with. * @return the query object * @throws java.lang.IllegalArgumentException if one or more arguments do not match class or * field type. * @throws java.lang.RuntimeException if any other error happens. <<<<<<< * * @param fieldName The field name * @return The minimum value ======= * @param fieldName The field name * @return If no objects exist or they all have {@code null} as the value for the given * date field, {@code null} will be returned. Otherwise the minimum date is returned. * When determining the minimum date, objects with {@code null} values are ignored. >>>>>>> * @param fieldName The field name * @return If no objects exist or they all have {@code null} as the value for the given * date field, {@code null} will be returned. Otherwise the minimum date is returned. 
* When determining the minimum date, objects with {@code null} values are ignored. <<<<<<< * Find the maximum value of a field * * @param fieldName The field name * @return The maximum value * @throws java.lang.UnsupportedOperationException The query is not valid ("syntax error") ======= * Find the maximum value of a field. * @param fieldName the field name. * @return if no objects exist or they all have {@code null} as the value for the given * date field, {@code null} will be returned. Otherwise the maximum date is returned. * When determining the maximum date, objects with {@code null} values are ignored. * @throws java.lang.UnsupportedOperationException the query is not valid ("syntax error"). >>>>>>> * Find the maximum value of a field. * @param fieldName the field name. * @return if no objects exist or they all have {@code null} as the value for the given * date field, {@code null} will be returned. Otherwise the maximum date is returned. * When determining the maximum date, objects with {@code null} values are ignored. * @throws java.lang.UnsupportedOperationException the query is not valid ("syntax error").
<<<<<<< import mods.railcraft.common.items.Metal; import mods.railcraft.common.items.RailcraftItems; import mods.railcraft.common.plugins.forge.WorldPlugin; import net.minecraft.block.SoundType; ======= import mods.railcraft.common.blocks.aesthetics.glass.BlockStrengthGlass; import mods.railcraft.common.items.Metal; import mods.railcraft.common.items.RailcraftItems; import mods.railcraft.common.plugins.color.EnumColor; import net.minecraft.block.Block; >>>>>>> import mods.railcraft.common.blocks.aesthetics.glass.BlockStrengthGlass; import mods.railcraft.common.items.Metal; import mods.railcraft.common.items.RailcraftItems; import mods.railcraft.common.plugins.color.EnumColor; import net.minecraft.block.Block; import net.minecraft.block.SoundType; <<<<<<< public class BlockTankIronGauge extends BlockTankIron { public static final PropertyEnum<ColumnPosition> POSITION = PropertyEnum.create("position", ColumnPosition.class); ======= public class BlockTankIronGauge extends BlockTankIron { public static final PropertyEnum<BlockStrengthGlass.Position> POSITION = PropertyEnum.create("position", BlockStrengthGlass.Position.class); >>>>>>> public class BlockTankIronGauge extends BlockTankIron { public static final PropertyEnum<BlockStrengthGlass.Position> POSITION = PropertyEnum.create("position", BlockStrengthGlass.Position.class); <<<<<<< setSoundType(SoundType.GLASS); setDefaultState(getDefaultState().withProperty(POSITION, ColumnPosition.SINGLE)); ======= setDefaultState(blockState.getBaseState().withProperty(getVariantProperty(), EnumColor.WHITE).withProperty(POSITION, BlockStrengthGlass.Position.SINGLE)); >>>>>>> setSoundType(SoundType.GLASS); setDefaultState(blockState.getBaseState().withProperty(getVariantProperty(), EnumColor.WHITE).withProperty(POSITION, BlockStrengthGlass.Position.SINGLE)); <<<<<<< return new BlockStateContainer(this, COLOR, POSITION); ======= return new BlockStateContainer(this, getVariantProperty(), POSITION); >>>>>>> return new BlockStateContainer(this, getVariantProperty(), POSITION); <<<<<<< public BlockRenderLayer getRenderLayer() { return BlockRenderLayer.TRANSLUCENT; ======= public BlockRenderLayer getBlockLayer() { return BlockRenderLayer.CUTOUT; } @Override @SideOnly(Side.CLIENT) public boolean shouldSideBeRendered(IBlockState blockState, IBlockAccess blockAccess, BlockPos pos, EnumFacing side) { IBlockState iblockstate = blockAccess.getBlockState(pos.offset(side)); Block block = iblockstate.getBlock(); return block == this ? false : super.shouldSideBeRendered(blockState, blockAccess, pos, side); >>>>>>> public BlockRenderLayer getRenderLayer() { return BlockRenderLayer.CUTOUT; } @SuppressWarnings("deprecation") @Override @SideOnly(Side.CLIENT) public boolean shouldSideBeRendered(IBlockState blockState, IBlockAccess blockAccess, BlockPos pos, EnumFacing side) { IBlockState iblockstate = blockAccess.getBlockState(pos.offset(side)); Block block = iblockstate.getBlock(); return block == this ? false : super.shouldSideBeRendered(blockState, blockAccess, pos, side); <<<<<<< @SuppressWarnings("deprecation") @Override public boolean shouldSideBeRendered(IBlockState state, IBlockAccess access, BlockPos pos, EnumFacing side) { return WorldPlugin.getBlock(access, pos.offset(side)) != this && super.shouldSideBeRendered(state, access, pos, side); } public enum ColumnPosition implements IStringSerializable { SINGLE, TOP, MIDDLE, BOTTOM; private final String name = name().toLowerCase(); @Override public String getName() { return name; } } ======= >>>>>>>
<<<<<<< if (RailcraftConfig.printSignalDebug() && prevAspect != SignalAspect.BLINK_RED && controller.getAspect() == SignalAspect.BLINK_RED) { Game.log(Level.INFO, "Signal Tile changed aspect to BLINK_RED: source:[{0}]", getPos()); ======= if (SignalTools.printSignalDebug && prevAspect != SignalAspect.BLINK_RED && controller.getAspect() == SignalAspect.BLINK_RED) { Game.log(Level.INFO, "Signal Tile changed aspect to BLINK_RED: source:[{0}, {1}, {2}]", xCoord, yCoord, zCoord); >>>>>>> if (SignalTools.printSignalDebug && prevAspect != SignalAspect.BLINK_RED && controller.getAspect() == SignalAspect.BLINK_RED) { Game.log(Level.INFO, "Signal Tile changed aspect to BLINK_RED: source:[{0}]", getPos());
<<<<<<< import mods.railcraft.common.blocks.machine.interfaces.ITileLit; ======= import mods.railcraft.common.blocks.aesthetics.glass.BlockStrengthGlass; import mods.railcraft.common.blocks.interfaces.ITileLit; import mods.railcraft.common.gui.EnumGui; >>>>>>> import mods.railcraft.common.blocks.aesthetics.glass.BlockStrengthGlass; import mods.railcraft.common.blocks.interfaces.ITileLit; import mods.railcraft.common.fluids.tanks.StandardTank; <<<<<<< import net.minecraft.util.EnumFacing; ======= >>>>>>> <<<<<<< ======= import javax.annotation.Nullable; import java.util.EnumSet; >>>>>>> import java.util.EnumSet; <<<<<<< return base; ======= return state; >>>>>>> return state; <<<<<<< boolean upConnected = WorldPlugin.getBlock(world, this.pos.offset(EnumFacing.UP)) == getBlockType(); boolean downConnected = WorldPlugin.getBlock(world, this.pos.offset(EnumFacing.DOWN)) == getBlockType(); if (upConnected) { if (downConnected) { base = base.withProperty(BlockTankIronGauge.POSITION, BlockTankIronGauge.ColumnPosition.MIDDLE); } else { base = base.withProperty(BlockTankIronGauge.POSITION, BlockTankIronGauge.ColumnPosition.BOTTOM); } } else { if (downConnected) { base = base.withProperty(BlockTankIronGauge.POSITION, BlockTankIronGauge.ColumnPosition.TOP); } else { base = base.withProperty(BlockTankIronGauge.POSITION, BlockTankIronGauge.ColumnPosition.SINGLE); } } return base; ======= EnumSet<BlockStrengthGlass.Position> neighbors = EnumSet.noneOf(BlockStrengthGlass.Position.class); if (WorldPlugin.getBlockState(world, pos.up()) == state) neighbors.add(BlockStrengthGlass.Position.TOP); if (WorldPlugin.getBlockState(world, pos.down()) == state) neighbors.add(BlockStrengthGlass.Position.BOTTOM); state = state.withProperty(BlockTankIronGauge.POSITION, BlockStrengthGlass.Position.patterns.get(neighbors)); return state; >>>>>>> EnumSet<BlockStrengthGlass.Position> neighbors = EnumSet.noneOf(BlockStrengthGlass.Position.class); if (WorldPlugin.getBlockState(world, pos.up()) == state) neighbors.add(BlockStrengthGlass.Position.TOP); if (WorldPlugin.getBlockState(world, pos.down()) == state) neighbors.add(BlockStrengthGlass.Position.BOTTOM); state = state.withProperty(BlockTankIronGauge.POSITION, BlockStrengthGlass.Position.patterns.get(neighbors)); return state;
<<<<<<< import mods.railcraft.common.blocks.charge.IChargeBlock; import mods.railcraft.common.items.ItemCharge; ======= import mods.railcraft.api.charge.ConnectType; import mods.railcraft.api.charge.IChargeBlock; >>>>>>> import mods.railcraft.api.charge.ConnectType; import mods.railcraft.api.charge.IChargeBlock; import mods.railcraft.common.items.ItemCharge;
<<<<<<< import buildcraft.api.gates.IAction; ======= import buildcraft.api.power.IPowerReceptor; import buildcraft.api.power.PowerHandler; import buildcraft.api.power.PowerHandler.PowerReceiver; import buildcraft.api.statements.IActionExternal; >>>>>>> import buildcraft.api.statements.IActionExternal; <<<<<<< private final Set<IAction> actions = new HashSet<IAction>(); ======= private final Set<IActionExternal> actions = new HashSet<IActionExternal>(); private final IIndicatorController energyIndicator = new EnergyIndicator(); private class EnergyIndicator extends IndicatorController { @Override protected void refreshToolTip() { tip.text = String.format("%d MJ", guiEnergy); } @Override public int getScaledLevel(int size) { float e = Math.min(guiEnergy, MAX_ENERGY); return (int) (e * size / MAX_ENERGY); } }; public IIndicatorController getEnergyIndicator() { return energyIndicator; } >>>>>>> private final Set<IActionExternal> actions = new HashSet<IActionExternal>();
<<<<<<< GameRegistry.registerTileEntity(TileAdminFeeder.class, "RCAdminFeederTile"); ======= GameRegistry.registerTileEntity(TileAdminSteamProducer.class, "RCAdminSteamProducerTile"); >>>>>>> GameRegistry.registerTileEntity(TileAdminFeeder.class, "RCAdminFeederTile"); GameRegistry.registerTileEntity(TileAdminSteamProducer.class, "RCAdminSteamProducerTile");
<<<<<<< public abstract class BlockContainerRailcraftSubtyped<V extends Enum<V> & IVariantEnumBlock<V>> extends BlockContainerRailcraft implements ISubtypedBlock<V> { private Class<V> variantClass; private V[] variantValues; private PropertyEnum<V> variantProperty; ======= public abstract class BlockContainerRailcraftSubtyped<V extends Enum<V> & IVariantEnum> extends BlockContainerRailcraft implements ISubtypedBlock<V> { private VariantData<V> variantData; >>>>>>> public abstract class BlockContainerRailcraftSubtyped<V extends Enum<V> & IVariantEnum> extends BlockContainerRailcraft implements ISubtypedBlock<V> { private VariantData<V> variantData; <<<<<<< private void setup() { if (variantClass != null) { return; } RailcraftBlockMetadata annotation = getClass().getAnnotation(RailcraftBlockMetadata.class); //noinspection unchecked variantClass = (Class<V>) annotation.variant(); variantValues = variantClass.getEnumConstants(); variantProperty = PropertyEnum.create("variant", variantClass, variantValues); ======= @Override public VariantData<V> getVariantData() { if (variantData == null) variantData = ISubtypedBlock.super.getVariantData(); return variantData; >>>>>>> @Override public VariantData<V> getVariantData() { if (variantData == null) variantData = ISubtypedBlock.super.getVariantData(); return variantData; <<<<<<< @SuppressWarnings("unchecked") @Override public IBlockState getState(@Nullable IVariantEnum variant) { if (variant == null) return getDefaultState(); checkVariant(variant); return getDefaultState().withProperty(getVariantProperty(), (V) variant); } @Override @NotNull public final IProperty<V> getVariantProperty() { setup(); return variantProperty; } @NotNull @Override public final Class<? extends V> getVariantEnum() { return variantClass; } @NotNull @Override public final V[] getVariants() { return variantValues; } ======= >>>>>>> <<<<<<< public void getSubBlocks(CreativeTabs tab, NonNullList<ItemStack> list) { for (V variant : getVariants()) { CreativePlugin.addToList(list, getStack(variant)); ======= public void getSubBlocks(Item itemIn, CreativeTabs tab, List<ItemStack> list) { for (V variant : getVariants()) { CreativePlugin.addToList(list, getStack(variant)); >>>>>>> public void getSubBlocks(CreativeTabs tab, NonNullList<ItemStack> list) { for (V variant : getVariants()) { CreativePlugin.addToList(list, getStack(variant));
<<<<<<< buttonList.add(stateMode = GuiMultiButton.create(4, w + 53, h + 65, 70, tile.getStateModeController().copy())); ======= buttonList.add(stateMode = new GuiMultiButton(4, w + 23, h + 65, 130, tile.getStateModeController().copy())); >>>>>>> buttonList.add(stateMode = GuiMultiButton.create(4, w + 23, h + 65, 130, tile.getStateModeController().copy()));
<<<<<<< import mods.railcraft.common.items.Metal; import mods.railcraft.common.items.RailcraftItems; import mods.railcraft.common.plugins.forge.WorldPlugin; import net.minecraft.block.SoundType; ======= import mods.railcraft.common.blocks.aesthetics.glass.BlockStrengthGlass; import mods.railcraft.common.items.Metal; import mods.railcraft.common.items.RailcraftItems; import mods.railcraft.common.plugins.color.EnumColor; import net.minecraft.block.Block; >>>>>>> import mods.railcraft.common.blocks.aesthetics.glass.BlockStrengthGlass; import mods.railcraft.common.items.Metal; import mods.railcraft.common.items.RailcraftItems; import mods.railcraft.common.plugins.color.EnumColor; import net.minecraft.block.Block; import net.minecraft.block.SoundType; <<<<<<< setSoundType(SoundType.GLASS); setDefaultState(getDefaultState().withProperty(POSITION, BlockTankIronGauge.ColumnPosition.SINGLE)); ======= setDefaultState(blockState.getBaseState().withProperty(getVariantProperty(), EnumColor.WHITE).withProperty(BlockTankIronGauge.POSITION, BlockStrengthGlass.Position.SINGLE)); >>>>>>> setSoundType(SoundType.GLASS); setDefaultState(blockState.getBaseState().withProperty(getVariantProperty(), EnumColor.WHITE).withProperty(BlockTankIronGauge.POSITION, BlockStrengthGlass.Position.SINGLE)); <<<<<<< return new BlockStateContainer(this, COLOR, POSITION); } @Override public void defineRecipes() { super.defineRecipes(); addRecipe("GPG", "PGP", "GPG", 'G', Blocks.GLASS_PANE, 'P', RailcraftItems.PLATE, Metal.STEEL); ======= return new BlockStateContainer(this, getVariantProperty(), BlockTankIronGauge.POSITION); >>>>>>> return new BlockStateContainer(this, getVariantProperty(), BlockTankIronGauge.POSITION); } @Override public void defineRecipes() { super.defineRecipes(); addRecipe("GPG", "PGP", "GPG", 'G', Blocks.GLASS_PANE, 'P', RailcraftItems.PLATE, Metal.STEEL); <<<<<<< public BlockRenderLayer getRenderLayer() { return BlockRenderLayer.CUTOUT; ======= public BlockRenderLayer getBlockLayer() { return BlockRenderLayer.CUTOUT; } @Override @SideOnly(Side.CLIENT) public boolean shouldSideBeRendered(IBlockState blockState, IBlockAccess blockAccess, BlockPos pos, EnumFacing side) { IBlockState iblockstate = blockAccess.getBlockState(pos.offset(side)); Block block = iblockstate.getBlock(); return block == this ? false : super.shouldSideBeRendered(blockState, blockAccess, pos, side); >>>>>>> public BlockRenderLayer getRenderLayer() { return BlockRenderLayer.CUTOUT;
<<<<<<< import java.util.List; import org.junit.Test; import org.junit.experimental.categories.Category; ======= import java.util.Optional; import org.junit.jupiter.api.Test; >>>>>>> import java.util.List; import java.util.Optional; import org.junit.jupiter.api.Test;
<<<<<<< private Class<V> variantClass; private V[] variantValues; private PropertyEnum<V> variantProperty; ======= private VariantData<V> variantData; >>>>>>> private VariantData<V> variantData; <<<<<<< } private void setup() { if (variantProperty == null) { RailcraftBlockMetadata annotation = getClass().getAnnotation(RailcraftBlockMetadata.class); //noinspection unchecked this.variantClass = (Class<V>) annotation.variant(); this.variantValues = variantClass.getEnumConstants(); this.variantProperty = PropertyEnum.create("variant", variantClass, variantValues); } ======= >>>>>>>
<<<<<<< import mods.railcraft.api.core.IPostConnection; import mods.railcraft.api.core.IVariantEnum; import mods.railcraft.common.blocks.BlockEntityDelegate; import mods.railcraft.common.blocks.ISubtypedBlock; import mods.railcraft.common.blocks.TileManager; import mods.railcraft.common.blocks.machine.interfaces.ITileRotate; import mods.railcraft.common.plugins.color.ColorPlugin; import mods.railcraft.common.plugins.color.EnumColor; ======= import mods.railcraft.common.blocks.BlockEntityDelegate; import mods.railcraft.common.blocks.ISubtypedBlock; >>>>>>> import mods.railcraft.common.blocks.BlockEntityDelegate; import mods.railcraft.common.blocks.ISubtypedBlock; <<<<<<< import mods.railcraft.common.plugins.forge.WorldPlugin; import mods.railcraft.common.util.collections.ArrayTools; ======= >>>>>>> import mods.railcraft.common.plugins.forge.WorldPlugin; <<<<<<< import net.minecraft.block.properties.IProperty; import net.minecraft.block.properties.PropertyEnum; import net.minecraft.block.state.BlockStateContainer; ======= import net.minecraft.block.state.BlockStateContainer; >>>>>>> import net.minecraft.block.state.BlockStateContainer; <<<<<<< import net.minecraft.item.EnumDyeColor; ======= import net.minecraft.item.Item; >>>>>>> <<<<<<< import net.minecraft.util.EnumHand; import net.minecraft.util.NonNullList; ======= >>>>>>> import net.minecraft.util.NonNullList; <<<<<<< public boolean onBlockActivated(World worldIn, BlockPos pos, IBlockState state, EntityPlayer playerIn, EnumHand hand, EnumFacing side, float hitX, float hitY, float hitZ) { if (hand == EnumHand.OFF_HAND) return false; return TileManager.forTile(this::getTileClass, state, worldIn, pos) .retrieve(TileMachineBase.class, t -> t.blockActivated(playerIn, hand, side, hitX, hitY, hitZ)).orElse(false); ======= public int getMetaFromState(IBlockState state) { return state.getValue(getVariantProperty()).ordinal(); >>>>>>> public int getMetaFromState(IBlockState state) { return state.getValue(getVariantProperty()).ordinal(); <<<<<<< ======= @Override public boolean hasTileEntity(IBlockState state) { return true; } >>>>>>> <<<<<<< @Override public ConnectStyle connectsToPost(IBlockAccess world, BlockPos pos, IBlockState state, EnumFacing face) { TileEntity tile = WorldPlugin.getBlockTile(world, pos); if (tile instanceof TileMachineBase) return ((TileMachineBase) tile).connectsToPost(face); return ConnectStyle.NONE; } ======= >>>>>>>
<<<<<<< import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; import java.nio.file.ProviderNotFoundException; ======= >>>>>>> import org.springframework.beans.factory.annotation.Qualifier;
<<<<<<< import mods.railcraft.common.items.RailcraftItems; import mods.railcraft.common.items.RailcraftToolItems; ======= >>>>>>> import mods.railcraft.common.items.RailcraftItems; import mods.railcraft.common.items.RailcraftToolItems; <<<<<<< @Override public void preInit() { EnumMachineAlpha alpha = EnumMachineAlpha.TANK_WATER; if (alpha.isAvailable()) CraftingPlugin.addRecipe(alpha.getItem(6), "WWW", "ISI", "WWW", 'I', "ingotIron", 'S', "slimeball", 'W', "plankWood"); ======= @Override public void initFirst() { ItemNotepad.registerItem(); if (ItemNotepad.item != null) { ItemStack magGlass = ItemMagnifyingGlass.getItem(); CraftingPlugin.addShapedRecipe(new ItemStack(ItemNotepad.item), "IF", "XP", 'I', new ItemStack(Items.dye, 1, 0), 'F', Items.feather, 'X', magGlass, 'P', Items.paper); } EnumMachineAlpha alpha = EnumMachineAlpha.TANK_WATER; if (alpha.register()) CraftingPlugin.addShapedRecipe(alpha.getItem(6), "WWW", "ISI", "WWW", 'I', "ingotIron", 'S', "slimeball", 'W', "plankWood"); >>>>>>> @Override public void preInit() { ItemNotepad.registerItem(); if (ItemNotepad.item != null) { CraftingPlugin.addRecipe(new ItemStack(ItemNotepad.item), "IF", "XP", 'I', new ItemStack(Items.dye, 1, 0), 'F', Items.feather, 'X', RailcraftItems.magGlass, 'P', Items.paper); } EnumMachineAlpha alpha = EnumMachineAlpha.TANK_WATER; if (alpha.isAvailable()) CraftingPlugin.addRecipe(alpha.getItem(6), "WWW", "ISI", "WWW", 'I', "ingotIron", 'S', "slimeball", 'W', "plankWood");
<<<<<<< final Data data = new Data(); data.setName(resolvedAttribute(dataNode, ATTRITBUTE.NAME)); data.setSourceLocation((Location) dataNode.getUserData(Location.USER_DATA_ID)); ======= final String source = resolvedAttribute(dataNode, AttributeName.SOURCE); data = new Data(); data.setName(resolvedAttribute(dataNode, AttributeName.NAME)); metamorph.registerNamedValueReceiver(source, data); >>>>>>> final Data data = new Data(); data.setName(resolvedAttribute(dataNode, AttributeName.NAME)); data.setSourceLocation((Location) dataNode.getUserData(Location.USER_DATA_ID));
<<<<<<< throw new RuntimeException("query statement as column not supported!"); MySQLItemVisitor ev = new MySQLItemVisitor(currentDb, this.charsetIndex); ======= throw new RuntimeException("query statement as column is not supported!"); MySQLItemVisitor ev = new MySQLItemVisitor(currentDb); >>>>>>> throw new RuntimeException("query statement as column is not supported!"); MySQLItemVisitor ev = new MySQLItemVisitor(currentDb, this.charsetIndex);
<<<<<<< private BackendConnection takeCon(BackendConnection conn, final ResponseHandler handler, final Object attachment, String schema) { takeCon(conn,schema); ======= private BackendConnection takeCon(BackendConnection conn, final ResponseHandler handler, final Object attachment, String schema) { conn.setBorrowed(true); if (!conn.getSchema().equals(schema)) { // need do schema syn in before sql send conn.setSchema(schema); } ConQueue queue = conMap.getSchemaConQueue(schema); queue.incExecuteCount(); >>>>>>> private BackendConnection takeCon(BackendConnection conn, final ResponseHandler handler, final Object attachment, String schema) { takeCon(conn,schema); <<<<<<< ======= // 每次取连接的时候,更新下lasttime,防止在前端连接检查的时候,关闭连接,导致sql执行失败 conn.setLastTime(System.currentTimeMillis()); >>>>>>> <<<<<<< public BackendConnection getConnection(String schema, boolean autocommit) throws IOException { // 从当前连接map中拿取已建立好的后端连接 BackendConnection con = this.conMap.tryTakeCon(schema, autocommit); if (con == null) { int activeCons = this.getActiveCount();// 当前最大活动连接 if (activeCons + 1 > size) {// 下一个连接大于最大连接数 LOGGER.error("the max activeConnnections size can not be max than maxconnections"); throw new IOException("the max activeConnnections size can not be max than maxconnections"); } else { // create connection LOGGER.info( "no ilde connection in pool,create new connection for " + this.name + " of schema " + schema); NewConnectionRespHandler simpleHandler = new NewConnectionRespHandler(); this.createNewConnection(simpleHandler, schema); con = simpleHandler.getBackConn(); } } return takeCon(con, schema); } ======= >>>>>>> public BackendConnection getConnection(String schema, boolean autocommit) throws IOException { // 从当前连接map中拿取已建立好的后端连接 BackendConnection con = this.conMap.tryTakeCon(schema, autocommit); if (con == null) { int activeCons = this.getActiveCount();// 当前最大活动连接 if (activeCons + 1 > size) {// 下一个连接大于最大连接数 LOGGER.error("the max activeConnnections size can not be max than maxconnections"); throw new IOException("the max activeConnnections size can not be max than maxconnections"); } else { // create connection LOGGER.info( "no ilde connection in pool,create new connection for " + this.name + " of schema " + schema); NewConnectionRespHandler simpleHandler = new NewConnectionRespHandler(); this.createNewConnection(simpleHandler, schema); con = simpleHandler.getBackConn(); } } return takeCon(con, schema); }
<<<<<<< if (SystemVariables.getSysVars().getDefaultValue(key) == null) { ======= if (key.startsWith("@@")) { key = key.substring(2); } if (SystemVariables.getDefaultValue(key) == null) { >>>>>>> if (key.startsWith("@@")) { key = key.substring(2); } if (SystemVariables.getSysVars().getDefaultValue(key) == null) { <<<<<<< if (SystemVariables.getSysVars().getDefaultValue(key) == null) { ======= if (key.startsWith("@@")) { key = key.substring(2); } if (SystemVariables.getDefaultValue(key) == null) { >>>>>>> if (key.startsWith("@@")) { key = key.substring(2); } if (SystemVariables.getSysVars().getDefaultValue(key) == null) {
<<<<<<< ======= import com.vmware.flowgate.common.model.WormholePrivilege; >>>>>>> import com.vmware.flowgate.common.model.WormholePrivilege; <<<<<<< ======= >>>>>>> <<<<<<< @RequestMapping(value = "/privileges", method = RequestMethod.GET) public Set<String> getPrivilegeName(HttpServletRequest request) { WormholeUserDetails user = accessTokenService.getCurrentUser(request); AuthorityUtil util = new AuthorityUtil(); return util.getPrivilege(user); ======= @RequestMapping(value="/privileges",method = RequestMethod.GET) public Set<String> getPrivilegeName(HttpServletRequest request){ WormholeUserDetails user = accessTokenService.getCurrentUser(request); AuthorityUtil util = new AuthorityUtil(); return util.getPrivilege(user); } public List<WormholePrivilege> readPrivilege(){ return privilegeRepository.findAll(); >>>>>>> @RequestMapping(value = "/privileges", method = RequestMethod.GET) public Set<String> getPrivilegeName(HttpServletRequest request) { WormholeUserDetails user = accessTokenService.getCurrentUser(request); AuthorityUtil util = new AuthorityUtil(); return util.getPrivilege(user);
<<<<<<< LightningDialogBuilder bookmarksDialogBuilder; @Inject PreferenceManager preferenceManager; ======= BookmarksDialogBuilder mBookmarksDialogBuilder; >>>>>>> LightningDialogBuilder mBookmarksDialogBuilder; @Inject PreferenceManager mPreferenceManager; <<<<<<< eventBus.post(new BrowserEvents.OpenUrlInCurrentTab(item.getUrl())); ======= mEventBus.post(new BookmarkEvents.Clicked(item)); >>>>>>> mEventBus.post(new BrowserEvents.OpenUrlInCurrentTab(item.getUrl())); <<<<<<< final BrowserActivity activity = (BrowserActivity) getActivity(); boolean darkTheme = preferenceManager.getUseTheme() != 0 || activity.isIncognito(); ======= final Activity activity = getActivity(); final PreferenceManager preferenceManager = PreferenceManager.getInstance(); boolean darkTheme = preferenceManager.getUseTheme() != 0 || ((BrowserActivity) activity).isIncognito(); >>>>>>> final BrowserActivity activity = (BrowserActivity) getActivity(); boolean darkTheme = mPreferenceManager.getUseTheme() != 0 || ((BrowserActivity) activity).isIncognito(); <<<<<<< bookmarksDialogBuilder.showLongPressLinkDialog(getContext(), item.getUrl()); ======= mBookmarksDialogBuilder.showLongPressedDialogForUrl(getContext(), item); >>>>>>> mBookmarksDialogBuilder.showLongPressLinkDialog(getContext(), item.getUrl());
<<<<<<< KStream<byte[], String> stream1 = builder.stream(inputTopicA); KStream<byte[], String> stream2 = stream1.mapValues((v -> v.toUpperCase())); KStream<byte[], String> stream3 = stream1.mapValues(v -> v.toLowerCase()); ======= final KStream<byte[], String> stream1 = builder.stream(inputTopicA); final KStream<byte[], String> stream2 = stream1.mapValues(String::toUpperCase); final KStream<byte[], String> stream3 = stream1.mapValues(String::toLowerCase); >>>>>>> final KStream<byte[], String> stream1 = builder.stream(inputTopicA); final KStream<byte[], String> stream2 = stream1.mapValues((v -> v.toUpperCase())); final KStream<byte[], String> stream3 = stream1.mapValues(v -> v.toLowerCase());
<<<<<<< new KeyValue<>("car-advertisement", "shown/not-clicked-yet"), new KeyValue<>("newspaper-advertisement", "shown/not-clicked-yet"), new KeyValue<>("gadget-advertisement", "shown/not-clicked-yet"), new KeyValue<>("newspaper-advertisement", "shown/clicked"), new KeyValue<>("gadget-advertisement", "shown/clicked"), new KeyValue<>("newspaper-advertisement", "shown/clicked") ======= new KeyValue<>("car-advertisement", "shown/null"), new KeyValue<>("newspaper-advertisement", "shown/null"), new KeyValue<>("gadget-advertisement", "shown/null"), new KeyValue<>("newspaper-advertisement", "shown/clicked"), new KeyValue<>("gadget-advertisement", "shown/clicked"), new KeyValue<>("newspaper-advertisement", "shown/clicked") >>>>>>> new KeyValue<>("car-advertisement", "shown/not-clicked-yet"), new KeyValue<>("newspaper-advertisement", "shown/not-clicked-yet"), new KeyValue<>("gadget-advertisement", "shown/not-clicked-yet"), new KeyValue<>("newspaper-advertisement", "shown/clicked"), new KeyValue<>("gadget-advertisement", "shown/clicked"), new KeyValue<>("newspaper-advertisement", "shown/clicked") <<<<<<< ======= >>>>>>> <<<<<<< final KStream<String, String> impressionsAndClicks = alerts.outerJoin( incidents, (impressionValue, clickValue) -> (clickValue == null)? impressionValue + "/not-clicked-yet": impressionValue + "/" + clickValue, // KStream-KStream joins are always windowed joins, hence we must provide a join window. JoinWindows.of(Duration.ofSeconds(5)), // In this specific example, we don't need to define join serdes explicitly because the key, left value, and // right value are all of type String, which matches our default serdes configured for the application. However, // we want to showcase the use of `Joined.with(...)` in case your code needs a different type setup. Joined.with( Serdes.String(), /* key */ Serdes.String(), /* left value */ Serdes.String() /* right value */ ) ); ======= final KStream<String, String> impressionsAndClicks = alerts.outerJoin(incidents, (impressionValue, clickValue) -> impressionValue + "/" + clickValue, // KStream-KStream joins are always windowed joins, hence we must provide a join window. JoinWindows.of(TimeUnit.SECONDS.toMillis(5))); >>>>>>> final KStream<String, String> impressionsAndClicks = alerts.outerJoin( incidents, (impressionValue, clickValue) -> (clickValue == null)? impressionValue + "/not-clicked-yet": impressionValue + "/" + clickValue, // KStream-KStream joins are always windowed joins, hence we must provide a join window. JoinWindows.of(Duration.ofSeconds(5)), // In this specific example, we don't need to define join serdes explicitly because the key, left value, and // right value are all of type String, which matches our default serdes configured for the application. However, // we want to showcase the use of `Joined.with(...)` in case your code needs a different type setup. Joined.with( Serdes.String(), /* key */ Serdes.String(), /* left value */ Serdes.String() /* right value */ ) );
<<<<<<< rest.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); Paths paths = new Paths("localhost", rest.port()); ======= rest.start(CLUSTER.bootstrapServers()); final Paths paths = new Paths("localhost", rest.port()); >>>>>>> rest.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); final Paths paths = new Paths("localhost", rest.port()); <<<<<<< rest.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); Paths paths = new Paths("localhost", rest.port()); ======= rest.start(CLUSTER.bootstrapServers()); final Paths paths = new Paths("localhost", rest.port()); >>>>>>> rest.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); final Paths paths = new Paths("localhost", rest.port()); <<<<<<< rest.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); Paths paths = new Paths("localhost", rest.port()); ======= rest.start(CLUSTER.bootstrapServers()); final Paths paths = new Paths("localhost", rest.port()); >>>>>>> rest.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); final Paths paths = new Paths("localhost", rest.port()); <<<<<<< rest.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); Paths paths1 = new Paths("localhost", rest.port()); ======= rest.start(CLUSTER.bootstrapServers()); final Paths paths1 = new Paths("localhost", rest.port()); >>>>>>> rest.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); final Paths paths1 = new Paths("localhost", rest.port()); <<<<<<< rest2.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); Paths paths2 = new Paths("localhost", rest2.port()); ======= rest2.start(CLUSTER.bootstrapServers()); final Paths paths2 = new Paths("localhost", rest2.port()); >>>>>>> rest2.start(CLUSTER.bootstrapServers(), TestUtils.tempDirectory().getPath()); final Paths paths2 = new Paths("localhost", rest2.port());
<<<<<<< private final ListBranchesPort listBranchesPort; ======= private final TaskExecutor taskExecutor; >>>>>>> private final ListBranchesPort listBranchesPort; private final TaskExecutor taskExecutor; <<<<<<< UpdateProjectPort updateProjectPort, ListBranchesPort listBranchesPort) { ======= UpdateProjectPort updateProjectPort, TaskExecutor taskExecutor) { >>>>>>> UpdateProjectPort updateProjectPort, ListBranchesPort listBranchesPort, TaskExecutor taskExecutor) { <<<<<<< this.listBranchesPort = listBranchesPort; ======= this.taskExecutor = taskExecutor; >>>>>>> this.listBranchesPort = listBranchesPort; this.taskExecutor = taskExecutor;
<<<<<<< public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getModulePath() { return modulePath; } public void setModulePath(String modulePath) { this.modulePath = modulePath; } ======= >>>>>>>
<<<<<<< client.target(path.urlPost()).request(APPLICATION_JSON_TYPE).post(Entity.json(inputOrder)); final Invocation.Builder builder = client ======= postWithRetries(client.target(path.urlPost()).request(APPLICATION_JSON_TYPE), Entity.json(inputOrder), 5); Invocation.Builder builder = client >>>>>>> postWithRetries(client.target(path.urlPost()).request(APPLICATION_JSON_TYPE), Entity.json(inputOrder), 5); final Invocation.Builder builder = client <<<<<<< client.target(path.urlPost()).request(APPLICATION_JSON_TYPE).post(Entity.json(inputOrder)); final Invocation.Builder builder = client ======= postWithRetries(client.target(path.urlPost()).request(APPLICATION_JSON_TYPE), Entity.json(inputOrder), 5); Invocation.Builder builder = client >>>>>>> postWithRetries(client.target(path.urlPost()).request(APPLICATION_JSON_TYPE), Entity.json(inputOrder), 5); final Invocation.Builder builder = client <<<<<<< client.target(path.urlPost()).request(APPLICATION_JSON_TYPE).post(Entity.json(inputOrder)); final Invocation.Builder builder = client ======= postWithRetries(client.target(path.urlPost()).request(APPLICATION_JSON_TYPE), Entity.json(inputOrder), 5); Invocation.Builder builder = client >>>>>>> postWithRetries(client.target(path.urlPost()).request(APPLICATION_JSON_TYPE), Entity.json(inputOrder), 5); final Invocation.Builder builder = client
<<<<<<< private StreamsBuilder createOrdersMaterializedView() { StreamsBuilder builder = new StreamsBuilder(); builder.table(ORDERS.name(), Consumed.with(ORDERS.keySerde(), ORDERS.valueSerde()), Materialized.as(ORDERS_STORE_NAME)) ======= private Topology createOrdersMaterializedView() { final StreamsBuilder builder = new StreamsBuilder(); builder.table(ORDERS.name(), Consumed.with(ORDERS.keySerde(), ORDERS.valueSerde()), Materialized.as(ORDERS_STORE_NAME)) >>>>>>> private StreamsBuilder createOrdersMaterializedView() { final StreamsBuilder builder = new StreamsBuilder(); builder.table(ORDERS.name(), Consumed.with(ORDERS.keySerde(), ORDERS.valueSerde()), Materialized.as(ORDERS_STORE_NAME)) <<<<<<< private KafkaStreams startKStreams(String bootstrapServers) { KafkaStreams streams = new KafkaStreams( createOrdersMaterializedView().build(), ======= private KafkaStreams startKStreams(final String bootstrapServers) { final KafkaStreams streams = new KafkaStreams(createOrdersMaterializedView(), >>>>>>> private KafkaStreams startKStreams(final String bootstrapServers) { final KafkaStreams streams = new KafkaStreams( createOrdersMaterializedView().build(),
<<<<<<< StreamsBuilder builder = new StreamsBuilder(); ======= final KStreamBuilder builder = new KStreamBuilder(); >>>>>>> final StreamsBuilder builder = new StreamsBuilder(); <<<<<<< // The number of segments has no impact on "correctness". // Using more segments implies larger overhead but allows for more fined grained record expiration // Note: the specified retention time is a _minimum_ time span and no strict upper time bound int numberOfSegments = 3; // retention period must be at least window size -- for this use case, we don't need a longer retention period // and thus just use the window size as retention time long retentionPeriod = maintainDurationPerEventInMs; StoreBuilder<WindowStore<String, Long>> dedupStoreBuilder = Stores.windowStoreBuilder( Stores.persistentWindowStore(storeName, retentionPeriod, numberOfSegments, maintainDurationPerEventInMs, false ), Serdes.String(), Serdes.Long()); ======= final StateStoreSupplier deduplicationStoreSupplier = Stores.create("eventId-store") .withKeys(Serdes.String()) // must match the return type of the Transformer's id extractor .withValues(Serdes.Long()) .persistent() .windowed(maintainDurationPerEventInMs, TimeUnit.MINUTES.toMillis(30), 3, false) .build(); >>>>>>> // The number of segments has no impact on "correctness". // Using more segments implies larger overhead but allows for more fined grained record expiration // Note: the specified retention time is a _minimum_ time span and no strict upper time bound final int numberOfSegments = 3; // retention period must be at least window size -- for this use case, we don't need a longer retention period // and thus just use the window size as retention time final long retentionPeriod = maintainDurationPerEventInMs; final StoreBuilder<WindowStore<String, Long>> dedupStoreBuilder = Stores.windowStoreBuilder( Stores.persistentWindowStore(storeName, retentionPeriod, numberOfSegments, maintainDurationPerEventInMs, false ), Serdes.String(), Serdes.Long()); <<<<<<< KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration); ======= final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration); >>>>>>> final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
<<<<<<< import org.apache.kafka.streams.state.StoreBuilder; import org.apache.kafka.streams.state.Stores; import org.apache.kafka.streams.state.ValueAndTimestamp; ======= >>>>>>> import org.apache.kafka.streams.state.ValueAndTimestamp; <<<<<<< private KeyValueStore<String, Pair<Double, Instant>> streamBufferStore; private KeyValueStore<String, ValueAndTimestamp<Long>> tableStore; ======= private KeyValueStore<String, Long> tableStore; >>>>>>> private KeyValueStore<String, ValueAndTimestamp<Long>> tableStore; <<<<<<< streamBufferStore = (KeyValueStore<String, Pair<Double, Instant>>) context.getStateStore(streamBufferStoreName); tableStore = (KeyValueStore<String, ValueAndTimestamp<Long>>) context.getStateStore(tableStoreName); ======= tableStore = (KeyValueStore<String, Long>) context.getStateStore(tableStoreName); >>>>>>> tableStore = (KeyValueStore<String, ValueAndTimestamp<Long>>) context.getStateStore(tableStoreName); <<<<<<< final Double value, final long streamRecordTimestamp) { final ValueAndTimestamp<Long> tableValue = tableStore.get(key); ======= final Double value) { final Long tableValue = tableStore.get(key); >>>>>>> final Double value, final long streamRecordTimestamp) { final ValueAndTimestamp<Long> tableValue = tableStore.get(key);
<<<<<<< private Topology createOrdersMaterializedView() { StreamsBuilder builder = new StreamsBuilder(); builder.table(ORDERS.name(), Consumed.with(ORDERS.keySerde(), ORDERS.valueSerde()), Materialized.as(ORDERS_STORE_NAME)) ======= private KStreamBuilder createOrdersMaterializedView() { final KStreamBuilder builder = new KStreamBuilder(); builder.stream(ORDERS.keySerde(), ORDERS.valueSerde(), ORDERS.name()) .groupByKey(ORDERS.keySerde(), ORDERS.valueSerde()) .reduce((agg, newVal) -> newVal, ORDERS_STORE_NAME) >>>>>>> private Topology createOrdersMaterializedView() { final StreamsBuilder builder = new StreamsBuilder(); builder.table(ORDERS.name(), Consumed.with(ORDERS.keySerde(), ORDERS.valueSerde()), Materialized.as(ORDERS_STORE_NAME))
<<<<<<< @ClassRule public static final EmbeddedSingleNodeKafkaCluster CLUSTER = new EmbeddedSingleNodeKafkaCluster(); private static final String inputTopic = "inputTopic"; @BeforeClass public static void startKafkaCluster() { CLUSTER.createTopic(inputTopic); } ======= >>>>>>>
<<<<<<< StreamsBuilder builder = new StreamsBuilder(); ======= final KStreamBuilder builder = new KStreamBuilder(); >>>>>>> final StreamsBuilder builder = new StreamsBuilder(); <<<<<<< String maxWindowStore = "max-window-store"; input.groupByKey() .windowedBy(TimeWindows.of(TimeUnit.MINUTES.toMillis(1L)).until(TimeUnit.MINUTES.toMillis(5L))) .aggregate( () -> Long.MIN_VALUE, (aggKey, value, aggregate) -> Math.max(value, aggregate), Materialized.as(maxWindowStore)); KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration); ======= input.groupByKey().aggregate( () -> Long.MIN_VALUE, (aggKey, value, aggregate) -> Math.max(value, aggregate), TimeWindows.of(TimeUnit.MINUTES.toMillis(1L)).until(TimeUnit.MINUTES.toMillis(5L)), Serdes.Long(), "max-window-store" ); final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration); >>>>>>> final String maxWindowStore = "max-window-store"; input.groupByKey() .windowedBy(TimeWindows.of(TimeUnit.MINUTES.toMillis(1L)).until(TimeUnit.MINUTES.toMillis(5L))) .aggregate( () -> Long.MIN_VALUE, (aggKey, value, aggregate) -> Math.max(value, aggregate), Materialized.as(maxWindowStore)); final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration); <<<<<<< ReadOnlyKeyValueStore<String, Long> keyValueStore = IntegrationTestUtils.waitUntilStoreIsQueryable(maxStore, QueryableStoreTypes.keyValueStore(), streams); ReadOnlyWindowStore<String, Long> windowStore = IntegrationTestUtils.waitUntilStoreIsQueryable(maxWindowStore, QueryableStoreTypes.windowStore(), streams); ======= final ReadOnlyKeyValueStore<String, Long> keyValueStore = IntegrationTestUtils.waitUntilStoreIsQueryable("max-store", QueryableStoreTypes.keyValueStore(), streams); final ReadOnlyWindowStore<String, Long> windowStore = IntegrationTestUtils.waitUntilStoreIsQueryable("max-window-store", QueryableStoreTypes.windowStore(), streams); >>>>>>> final ReadOnlyKeyValueStore<String, Long> keyValueStore = IntegrationTestUtils.waitUntilStoreIsQueryable(maxStore, QueryableStoreTypes.keyValueStore(), streams); final ReadOnlyWindowStore<String, Long> windowStore = IntegrationTestUtils.waitUntilStoreIsQueryable(maxWindowStore, QueryableStoreTypes.windowStore(), streams);
<<<<<<< import org.apache.kafka.streams.StreamsBuilder; ======= import org.apache.kafka.streams.KafkaStreams.State; >>>>>>> import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.KafkaStreams.State;
<<<<<<< StreamsBuilder builder = new StreamsBuilder(); ======= final KStreamBuilder builder = new KStreamBuilder(); >>>>>>> final StreamsBuilder builder = new StreamsBuilder(); <<<<<<< StoreBuilder<KeyValueStore<String, Long>> wordCountsStore = Stores.keyValueStoreBuilder( Stores.persistentKeyValueStore("WordCountsStore"), Serdes.String(), Serdes.Long()) .withCachingEnabled(); ======= final StateStoreSupplier wordCountsStore = Stores.create("WordCountsStore") .withKeys(Serdes.String()) .withValues(Serdes.Long()) .persistent() .build(); >>>>>>> final StoreBuilder<KeyValueStore<String, Long>> wordCountsStore = Stores.keyValueStoreBuilder( Stores.persistentKeyValueStore("WordCountsStore"), Serdes.String(), Serdes.Long()) .withCachingEnabled(); <<<<<<< KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration); ======= final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration); >>>>>>> final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
<<<<<<< import org.apache.kafka.streams.kstream.Consumed; ======= import org.apache.kafka.streams.Topology; >>>>>>> import org.apache.kafka.streams.kstream.Consumed; import org.apache.kafka.streams.Topology;
<<<<<<< StreamsBuilder builder = new StreamsBuilder(); ======= final KStreamBuilder builder = new KStreamBuilder(); >>>>>>> final StreamsBuilder builder = new StreamsBuilder(); <<<<<<< KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration); ======= final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration); >>>>>>> final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
<<<<<<< import org.apache.kafka.clients.consumer.ConsumerConfig; ======= import io.confluent.examples.streams.microservices.util.MicroserviceTestUtils; >>>>>>> import io.confluent.examples.streams.microservices.util.MicroserviceTestUtils; import org.apache.kafka.clients.consumer.ConsumerConfig; <<<<<<< private static final String WORD_COUNT = "interactive-queries-wordcount-example-word-count-repartition"; private static final String WINDOWED_WORD_COUNT = "interactive-queries-wordcount-example-windowed-word-count-repartition"; private static final String WORD_COUNT_OUTPUT = "interactive-queries-wordcount-example-word-count-changelog"; private static final String WINDOWED_WORD_COUNT_OUTPUT = "interactive-queries-wordcount-example-windowed-word-count-changelog"; ======= public static final String WORD_COUNT = "interactive-queries-wordcount-example-word-count-repartition"; public static final String WINDOWED_WORD_COUNT = "interactive-queries-wordcount-example-windowed-word-count-repartition"; >>>>>>> private static final String WORD_COUNT = "interactive-queries-wordcount-example-word-count-repartition"; private static final String WINDOWED_WORD_COUNT = "interactive-queries-wordcount-example-windowed-word-count-repartition"; private static final String WORD_COUNT_OUTPUT = "interactive-queries-wordcount-example-word-count-changelog"; private static final String WINDOWED_WORD_COUNT_OUTPUT = "interactive-queries-wordcount-example-windowed-word-count-changelog"; <<<<<<< createStreamConfig(CLUSTER.bootstrapServers(), port, "one")); final CountDownLatch startupLatch = new CountDownLatch(1); kafkaStreams.setStateListener((newState, oldState) -> { if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) { startupLatch.countDown(); } }); ======= createStreamConfig(CLUSTER.bootstrapServers(), port, "one")); >>>>>>> createStreamConfig(CLUSTER.bootstrapServers(), port, "one")); final CountDownLatch startupLatch = new CountDownLatch(1); kafkaStreams.setStateListener((newState, oldState) -> { if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) { startupLatch.countDown(); } }); <<<<<<< private Properties createStreamConfig(final String bootStrap, final int port, final String stateDir) throws IOException { ======= private Properties createStreamConfig(final String bootStrap, final int port, String stateDir) throws IOException { >>>>>>> private Properties createStreamConfig(final String bootStrap, final int port, final String stateDir) throws IOException { <<<<<<< streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "interactive-queries-wordcount-example"); ======= streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "interactive-queries-wordcount-example"); >>>>>>> streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "interactive-queries-wordcount-example");
<<<<<<< public void start(final String bootstrapServers, final String stateDir) { streams = processStreams(bootstrapServers, stateDir); ======= public void start(final String bootstrapServers) { streams = processStreams(bootstrapServers, "/tmp/kafka-streams"); >>>>>>> public void start(final String bootstrapServers, final String stateDir) { streams = processStreams(bootstrapServers, stateDir); <<<<<<< public static void main(String[] args) throws Exception { InventoryService service = new InventoryService(); service.start(parseArgsAndConfigure(args), "/tmp/kafka-streams"); ======= public static void main(final String[] args) throws Exception { final InventoryService service = new InventoryService(); service.start(parseArgsAndConfigure(args)); >>>>>>> public static void main(final String[] args) throws Exception { final InventoryService service = new InventoryService(); service.start(parseArgsAndConfigure(args), "/tmp/kafka-streams");
<<<<<<< List<Integer> inputValues = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); List<Integer> expectedValues = Collections.singletonList(30); ======= final List<Integer> inputValues = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); final List<Integer> expectedValues = Arrays.asList(30); >>>>>>> final List<Integer> inputValues = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); final List<Integer> expectedValues = Collections.singletonList(30); <<<<<<< StreamsBuilder builder = new StreamsBuilder(); ======= final KStreamBuilder builder = new KStreamBuilder(); >>>>>>> final StreamsBuilder builder = new StreamsBuilder(); <<<<<<< KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration); ======= final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration); >>>>>>> final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
<<<<<<< import io.confluent.examples.streams.ExampleTestUtils; ======= import io.confluent.examples.streams.microservices.util.MicroserviceTestUtils; >>>>>>> import io.confluent.examples.streams.ExampleTestUtils; import io.confluent.examples.streams.microservices.util.MicroserviceTestUtils; <<<<<<< songsStore = streams.store(KafkaMusicExample.ALL_SONGS, QueryableStoreTypes.keyValueStore()); ======= songsStore = streams.store(KafkaMusicExample.ALL_SONGS, QueryableStoreTypes.keyValueStore()); >>>>>>> songsStore = streams.store(KafkaMusicExample.ALL_SONGS, QueryableStoreTypes.keyValueStore()); <<<<<<< } catch (final Exception e) { ======= } catch (Exception e) { e.printStackTrace(); >>>>>>> } catch (final Exception e) { e.printStackTrace(); <<<<<<< ======= @Test public void shouldDemonstrateInteractiveQueriesOnAnyValidHost() throws Exception { final String host = "127.10.10.10"; createStreams(host); streams.start(); if (restProxy != null) { // wait until the StreamsMetadata is available as this indicates that // KafkaStreams initialization has occurred TestUtils.waitForCondition(() -> !StreamsMetadata.NOT_AVAILABLE.equals(streams.allMetadataForStore(KafkaMusicExample.TOP_FIVE_SONGS_STORE)), MAX_WAIT_MS, "StreamsMetadata should be available"); final String baseUrl = "http://" + host + ":" + appServerPort + "/kafka-music"; final Client client = ClientBuilder.newClient(); // Wait until the all-songs state store has some data in it TestUtils.waitForCondition(() -> { final ReadOnlyKeyValueStore<Long, Song> songsStore; try { songsStore = streams.store(KafkaMusicExample.ALL_SONGS, QueryableStoreTypes.keyValueStore()); return songsStore.all().hasNext(); } catch (Exception e) { return false; } }, MAX_WAIT_MS, KafkaMusicExample.ALL_SONGS + " should be non-empty"); final IntFunction<SongPlayCountBean> intFunction = index -> { final Song song = songs.get(index); return songCountPlayBean(song, 6L - (index % 6)); }; // Verify that the charts are as expected verifyChart(baseUrl + "/charts/genre/punk", client, IntStream.range(0, 5).mapToObj(intFunction).collect(Collectors.toList())); verifyChart(baseUrl + "/charts/genre/hip hop", client, IntStream.range(6, 11).mapToObj(intFunction).collect(Collectors.toList())); verifyChart(baseUrl + "/charts/top-five", client, Arrays.asList(songCountPlayBean(songs.get(0), 6L), songCountPlayBean(songs.get(6), 6L), songCountPlayBean(songs.get(1), 5L), songCountPlayBean(songs.get(7), 5L), songCountPlayBean(songs.get(2), 4L) ) ); } else { fail("Should fail demonstrating InteractiveQueries on any valid host as the Rest Service failed to start."); } } >>>>>>> <<<<<<< final List<SongPlayCountBean> expectedChart) throws InterruptedException { final Invocation.Builder genreChartRequest = client.target(url) .request(MediaType.APPLICATION_JSON_TYPE); ======= final List<SongPlayCountBean> expectedChart) throws InterruptedException { final Invocation.Builder genreChartRequest = client .target(url) .request(MediaType.APPLICATION_JSON_TYPE); >>>>>>> final List<SongPlayCountBean> expectedChart) throws InterruptedException { final Invocation.Builder genreChartRequest = client .target(url) .request(MediaType.APPLICATION_JSON_TYPE); <<<<<<< final List<SongPlayCountBean> chart = genreChartRequest.get(new GenericType<List<SongPlayCountBean>>() {}); return chart.equals(expectedChart); } catch (final Exception e) { ======= final List<SongPlayCountBean> chart = MicroserviceTestUtils.getWithRetries( genreChartRequest, new GenericType<List<SongPlayCountBean>>() {}, 0); 
System.err.println(chart.size()); return chart.size() == 5; } catch (Exception e) { e.printStackTrace(); >>>>>>> final List<SongPlayCountBean> chart = MicroserviceTestUtils.getWithRetries( genreChartRequest, new GenericType<List<SongPlayCountBean>>() {}, 0); System.err.println(chart.size()); return chart.size() == 5; } catch (final Exception e) { e.printStackTrace(); <<<<<<< }, MAX_WAIT_MS, "Returned chart should equal to the expected items"); ======= }, MAX_WAIT_MS, "chart should have 5 items"); final List<SongPlayCountBean> chart = MicroserviceTestUtils.getWithRetries( genreChartRequest, new GenericType<List<SongPlayCountBean>>() {}, 5); assertThat(chart, is(expectedChart)); >>>>>>> }, MAX_WAIT_MS, "chart should have 5 items"); final List<SongPlayCountBean> chart = MicroserviceTestUtils.getWithRetries( genreChartRequest, new GenericType<List<SongPlayCountBean>>() {}, 5); assertThat(chart, is(expectedChart));
<<<<<<< StreamsBuilder builder = new StreamsBuilder(); KStream<String, OrderValidation> validations = builder .stream(ORDER_VALIDATIONS.name(), serdes1); KStream<String, Order> orders = builder .stream(ORDERS.name(), serdes2) ======= final KStreamBuilder builder = new KStreamBuilder(); final KStream<String, OrderValidation> validations = builder .stream(ORDER_VALIDATIONS.keySerde(), ORDER_VALIDATIONS.valueSerde(), ORDER_VALIDATIONS.name()); final KStream<String, Order> orders = builder .stream(ORDERS.keySerde(), ORDERS.valueSerde(), ORDERS.name()) >>>>>>> final StreamsBuilder builder = new StreamsBuilder(); final KStream<String, OrderValidation> validations = builder .stream(ORDER_VALIDATIONS.name(), serdes1); final KStream<String, Order> orders = builder .stream(ORDERS.name(), serdes2)
<<<<<<< StreamsBuilder builder = new StreamsBuilder(); ======= final KStreamBuilder builder = new KStreamBuilder(); >>>>>>> final StreamsBuilder builder = new StreamsBuilder(); <<<<<<< KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration); ======= final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration); >>>>>>> final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
<<<<<<< import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.ByteArrayDeserializer; import org.apache.kafka.common.serialization.ByteArraySerializer; ======= >>>>>>>
<<<<<<< import org.apache.kafka.streams.StreamsBuilder; ======= import org.apache.kafka.streams.KafkaStreams.State; >>>>>>> import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.KafkaStreams.State;
<<<<<<< import org.apache.kafka.streams.kstream.Consumed; ======= import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.apache.kafka.streams.KafkaStreams.State; >>>>>>> import org.apache.kafka.streams.kstream.Consumed; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.apache.kafka.streams.KafkaStreams.State; <<<<<<< log.info("Started Service " + SERVICE_APP_ID); ======= try { if (!startLatch.await(60, TimeUnit.SECONDS)) { throw new RuntimeException("Streams never finished rebalancing on startup"); } } catch (final InterruptedException e) { Thread.currentThread().interrupt(); } log.info("Started Service " + APP_ID); >>>>>>> try { if (!startLatch.await(60, TimeUnit.SECONDS)) { throw new RuntimeException("Streams never finished rebalancing on startup"); } } catch (final InterruptedException e) { Thread.currentThread().interrupt(); } log.info("Started Service " + SERVICE_APP_ID);
<<<<<<< Properties schemaRegistryProps = new Properties(); ======= schemaRegistry = new RestApp(0, zookeeperConnect(), KAFKA_SCHEMAS_TOPIC, AVRO_COMPATIBILITY_TYPE); // note: this following only goes to 3.3.x branches---don't merge into 4.x branches; // 4.x branches have a different fix to make SR access stable // this fix also requires to update pom.xml do depend on schema registry 3.3.2 (test artifact for unit tests only) final Properties schemaRegistryProps = new Properties(); >>>>>>> final Properties schemaRegistryProps = new Properties();
<<<<<<< StreamsBuilder builder = new StreamsBuilder(); KStream<String, String> alerts = builder.stream(adImpressionsTopic); KStream<String, String> incidents = builder.stream(adClicksTopic); ======= final KStreamBuilder builder = new KStreamBuilder(); final KStream<String, String> alerts = builder.stream(stringSerde, stringSerde, adImpressionsTopic); final KStream<String, String> incidents = builder.stream(stringSerde, stringSerde, adClicksTopic); >>>>>>> final StreamsBuilder builder = new StreamsBuilder(); final KStream<String, String> alerts = builder.stream(adImpressionsTopic); final KStream<String, String> incidents = builder.stream(adClicksTopic); <<<<<<< KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration); ======= final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration); >>>>>>> final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
<<<<<<< public void start(final String bootstrapServers, final String stateDir) { streams = processStreams(bootstrapServers, stateDir); ======= public void start(final String bootstrapServers) { streams = processStreams(bootstrapServers, "/tmp/kafka-streams"); >>>>>>> public void start(final String bootstrapServers, final String stateDir) { streams = processStreams(bootstrapServers, stateDir); <<<<<<< public static void main(String[] args) throws Exception { FraudService service = new FraudService(); service.start(parseArgsAndConfigure(args), "/tmp/kafka-streams"); ======= public static void main(final String[] args) throws Exception { final FraudService service = new FraudService(); service.start(parseArgsAndConfigure(args)); >>>>>>> public static void main(final String[] args) throws Exception { final FraudService service = new FraudService(); service.start(parseArgsAndConfigure(args), "/tmp/kafka-streams");
<<<<<<< ======= import static org.mockito.Mockito.*; import io.reflectoring.coderadar.CoderadarConfigurationProperties; >>>>>>> import static org.mockito.Mockito.*; import io.reflectoring.coderadar.CoderadarConfigurationProperties;
<<<<<<< * $ java -cp target/kafka-streams-examples-5.0.0-SNAPSHOT-standalone.jar \ * io.confluent.examples.streams.interactivequeries.InteractiveQueriesExample 7070 ======= * $ java -cp target/kafka-streams-examples-4.0.0-SNAPSHOT-standalone.jar \ * io.confluent.examples.streams.interactivequeries.WordCountInteractiveQueriesExample 7070 >>>>>>> * $ java -cp target/kafka-streams-examples-5.0.0-SNAPSHOT-standalone.jar \ * io.confluent.examples.streams.interactivequeries.WordCountInteractiveQueriesExample 7070 <<<<<<< * $ java -cp target/kafka-streams-examples-5.0.0-SNAPSHOT-standalone.jar \ * io.confluent.examples.streams.interactivequeries.InteractiveQueriesExample 7071 ======= * $ java -cp target/kafka-streams-examples-4.0.0-SNAPSHOT-standalone.jar \ * io.confluent.examples.streams.interactivequeries.WordCountInteractiveQueriesExample 7071 >>>>>>> * $ java -cp target/kafka-streams-examples-5.0.0-SNAPSHOT-standalone.jar \ * io.confluent.examples.streams.interactivequeries.WordCountInteractiveQueriesExample 7071
<<<<<<< import io.confluent.examples.streams.kafka.EmbeddedSingleNodeKafkaCluster; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.ByteArrayDeserializer; import org.apache.kafka.common.serialization.ByteArraySerializer; ======= >>>>>>> <<<<<<< @ClassRule public static final EmbeddedSingleNodeKafkaCluster CLUSTER = new EmbeddedSingleNodeKafkaCluster(); private static final String inputTopic = "inputTopic"; private static final String outputTopic = "outputTopic"; private static final String storeName = "eventId-store"; @BeforeClass public static void startKafkaCluster() { CLUSTER.createTopic(inputTopic); CLUSTER.createTopic(outputTopic); } ======= private static final String storeName = "eventId-store"; >>>>>>> private static final String storeName = "eventId-store"; <<<<<<< thirdId, firstId, secondId); ======= thirdId, firstId, secondId); >>>>>>> thirdId, firstId, secondId); <<<<<<< Stores.persistentWindowStore(storeName, retentionPeriod, windowSize, false ), Serdes.String(), Serdes.Long()); ======= Stores.persistentWindowStore(storeName, retentionPeriod, numberOfSegments, maintainDurationPerEventInMs, false ), Serdes.String(), Serdes.Long()); >>>>>>> Stores.persistentWindowStore(storeName, retentionPeriod, windowSize, false ), Serdes.String(), Serdes.Long()); <<<<<<< // In this example, we assume that the record value as-is represents a unique event ID by // which we can perform de-duplication. If your records are different, adapt the extractor // function as needed. () -> new DeduplicationTransformer<>(windowSize.toMillis(), (key, value) -> value), storeName); ======= // In this example, we assume that the record value as-is represents a unique event ID by // which we can perform de-duplication. If your records are different, adapt the extractor // function as needed. () -> new DeduplicationTransformer<>(maintainDurationPerEventInMs, (key, value) -> value), storeName); >>>>>>> // In this example, we assume that the record value as-is represents a unique event ID by // which we can perform de-duplication. If your records are different, adapt the extractor // function as needed. () -> new DeduplicationTransformer<>(windowSize.toMillis(), (key, value) -> value), storeName); <<<<<<< final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration); streams.start(); // // Step 2: Produce some input data to the input topic. // final Properties producerConfig = new Properties(); producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); producerConfig.put(ProducerConfig.ACKS_CONFIG, "all"); producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0); producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class); producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); IntegrationTestUtils.produceValuesSynchronously(inputTopic, inputValues, producerConfig); // // Step 3: Verify the application's output data. 
// final Properties consumerConfig = new Properties(); consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, "deduplication-integration-test-standard-consumer"); consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); final List<String> actualValues = IntegrationTestUtils.waitUntilMinValuesRecordsReceived( consumerConfig, outputTopic, expectedValues.size() ); streams.close(); assertThat(actualValues).containsExactlyElementsOf(expectedValues); ======= final TopologyTestDriver topologyTestDriver = new TopologyTestDriver(builder.build(), streamsConfiguration); try { // // Step 2: Produce some input data to the input topic. // IntegrationTestUtils.produceKeyValuesSynchronously( inputTopic, inputValues.stream().map(v -> new KeyValue<>(null, v)).collect(Collectors.toList()), topologyTestDriver, new IntegrationTestUtils.NothingSerde<>(), new StringSerializer() ); // // Step 3: Verify the application's output data. // final List<String> actualValues = IntegrationTestUtils.drainStreamOutput( outputTopic, topologyTestDriver, new IntegrationTestUtils.NothingSerde<>(), new StringDeserializer() ).stream().map(kv -> kv.value).collect(Collectors.toList()); assertThat(actualValues).containsExactlyElementsOf(expectedValues); } finally { topologyTestDriver.close(); } >>>>>>> try (final TopologyTestDriver topologyTestDriver = new TopologyTestDriver(builder.build(), streamsConfiguration)) { // // Step 2: Produce some input data to the input topic. // IntegrationTestUtils.produceKeyValuesSynchronously( inputTopic, inputValues.stream().map(v -> new KeyValue<>(null, v)).collect(Collectors.toList()), topologyTestDriver, new IntegrationTestUtils.NothingSerde<>(), new StringSerializer() ); // // Step 3: Verify the application's output data. // final List<String> actualValues = IntegrationTestUtils.drainStreamOutput( outputTopic, topologyTestDriver, new IntegrationTestUtils.NothingSerde<>(), new StringDeserializer() ).stream().map(kv -> kv.value).collect(Collectors.toList()); assertThat(actualValues).containsExactlyElementsOf(expectedValues); }
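The resolution above swaps the embedded-cluster integration test for an in-process TopologyTestDriver closed via try-with-resources. A generic sketch of that style of test, assuming kafka-streams-test-utils 2.4+ and a trivial pass-through topology rather than the repo's own IntegrationTestUtils helpers:

import static org.assertj.core.api.Assertions.assertThat;

import java.util.List;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.junit.Test;

public class TopologyTestDriverSketch {

  @Test
  public void shouldPassRecordsThrough() {
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream("inputTopic").to("outputTopic"); // trivial topology for illustration

    final Properties config = new Properties();
    config.put(StreamsConfig.APPLICATION_ID_CONFIG, "topology-test-driver-sketch");
    config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234"); // never contacted by the test driver
    config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

    // try-with-resources mirrors the resolution above: the driver is always closed.
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), config)) {
      final TestInputTopic<String, String> input =
          driver.createInputTopic("inputTopic", new StringSerializer(), new StringSerializer());
      final TestOutputTopic<String, String> output =
          driver.createOutputTopic("outputTopic", new StringDeserializer(), new StringDeserializer());

      input.pipeInput("k", "v");
      final List<String> values = output.readValuesToList();
      assertThat(values).containsExactly("v");
    }
  }
}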
<<<<<<< import org.springframework.stereotype.Service; ======= >>>>>>> import org.springframework.stereotype.Service; <<<<<<< public List<MetricValueForCommit> get(GetMetricsForCommitCommand command, Long projectId) { CommitEntity commitEntity = getCommitsInProjectRepository.findByNameAndProjectId(command.getCommit(), projectId); List<MetricValueForCommitQueryResult> result = getMetricValuesOfCommitRepository.getMetricValuesForCommit( projectId, command.getMetrics(), commitEntity.getTimestamp().toInstant().toString()); ======= public List<MetricValueForCommit> get(GetMetricsForCommitCommand command) { List<MetricValueForCommitQueryResult> result = getMetricValuesOfCommitRepository.getMetricValuesForCommit( command.getCommit(), command.getMetrics()); >>>>>>> public List<MetricValueForCommit> get(GetMetricsForCommitCommand command, Long projectId) { CommitEntity commitEntity = getCommitsInProjectRepository.findByNameAndProjectId(command.getCommit(), projectId); List<MetricValueForCommitQueryResult> result = getMetricValuesOfCommitRepository.getMetricValuesForCommit( projectId, command.getMetrics(), commitEntity.getTimestamp().toInstant().toString());
<<<<<<< import javax.annotation.Resource; import org.springframework.web.bind.annotation.ResponseBody; ======= >>>>>>> import javax.annotation.Resource; import org.springframework.web.bind.annotation.ResponseBody;
<<<<<<< import org.msgpack.template.TemplateBuilder; import org.msgpack.template.TemplateClassWriter; ======= >>>>>>> import org.msgpack.template.TemplateClassWriter;
<<<<<<< case "kinesis": { log.info("Adding Kinesis Logger with properties: " + elProps); try { loggers.add(new KinesisLogger(elProps)); } catch (Exception ex) { log.error("Kinesis Logger unable to initialize", ex); } break; } ======= case "pulsar": { log.info("Adding Pulsar Logger with properties: " + elProps); try { loggers.add(new PulsarLogger(elProps)); } catch (final PulsarClientException ex) { log.error("Pulsar Logger unable to initialize", ex); } break; } >>>>>>> case "kinesis": { log.info("Adding Kinesis Logger with properties: " + elProps); try { loggers.add(new KinesisLogger(elProps)); } catch (Exception ex) { log.error("Kinesis Logger unable to initialize", ex); case "pulsar": { log.info("Adding Pulsar Logger with properties: " + elProps); try { loggers.add(new PulsarLogger(elProps)); } catch (final PulsarClientException ex) { log.error("Pulsar Logger unable to initialize", ex); } break; }
<<<<<<< ======= import java.io.IOException; >>>>>>> import java.io.IOException;
<<<<<<< import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpRequestBase; ======= import org.apache.http.client.methods.*; >>>>>>> import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpRequestBase; <<<<<<< final HttpEntity httpEntity = makeEntity(requestAction.getParams()); ======= >>>>>>> <<<<<<< PrintStream logger, boolean consolLogResponseBody) throws IOException, InterruptedException { ======= PrintStream logger, boolean logResponseBody, int timeout) throws IOException { doSecurity(client, method.getURI()); >>>>>>> PrintStream logger, boolean consolLogResponseBody, int timeout) throws IOException, InterruptedException { <<<<<<< final HttpResponse httpResponse = client.execute(method); logger.println("Response Code: " + httpResponse.getStatusLine()); if (consolLogResponseBody || outputFilePath != null) { String httpData = EntityUtils.toString(httpResponse.getEntity()); if (consolLogResponseBody) { logger.println("Response: \n" + httpData); } if (outputFilePath != null) { outputFilePath.write().write(httpData.getBytes()); } } EntityUtils.consume(httpResponse.getEntity()); return httpResponse; ======= if (timeout > 0) { client.getParams().setParameter("http.socket.timeout", timeout * 1000); client.getParams().setParameter("http.connection.timeout", timeout * 1000); client.getParams().setParameter("http.connection-manager.timeout", new Long(timeout * 1000)); client.getParams().setParameter("http.protocol.head-body-timeout", timeout * 1000); } final HttpResponse execute = client.execute(method); logger.println("Response Code: " + execute.getStatusLine()); if (logResponseBody){ logger.println("Response: \n" + EntityUtils.toString(execute.getEntity())); } EntityUtils.consume(execute.getEntity()); return execute; >>>>>>> if (timeout > 0) { client.getParams().setParameter("http.socket.timeout", timeout * 1000); client.getParams().setParameter("http.connection.timeout", timeout * 1000); client.getParams().setParameter("http.connection-manager.timeout", new Long(timeout * 1000)); client.getParams().setParameter("http.protocol.head-body-timeout", timeout * 1000); } final HttpResponse httpResponse = client.execute(method); logger.println("Response Code: " + httpResponse.getStatusLine()); if (consolLogResponseBody || outputFilePath != null) { String httpData = EntityUtils.toString(httpResponse.getEntity()); if (consolLogResponseBody) { logger.println("Response: \n" + httpData); } if (outputFilePath != null) { outputFilePath.write().write(httpData.getBytes()); } } EntityUtils.consume(httpResponse.getEntity()); return httpResponse;
<<<<<<< private String uploadFile = DescriptorImpl.uploadFile; private String multipartName = DescriptorImpl.multipartName; ======= private Boolean useSystemProperties = DescriptorImpl.useSystemProperties; >>>>>>> private String uploadFile = DescriptorImpl.uploadFile; private String multipartName = DescriptorImpl.multipartName; private Boolean useSystemProperties = DescriptorImpl.useSystemProperties; <<<<<<< public static final String uploadFile = ""; public static final String multipartName = ""; ======= public static final Boolean useSystemProperties = false; >>>>>>> public static final String uploadFile = ""; public static final String multipartName = ""; public static final Boolean useSystemProperties = false;
<<<<<<< private final FilePath uploadFile; private final String multipartName; ======= private final boolean useSystemProperties; >>>>>>> private final FilePath uploadFile; private final String multipartName; private final boolean useSystemProperties; <<<<<<< http.getAuthentication(), uploadFile, http.getMultipartName(), ======= http.getAuthentication(), http.getUseSystemProperties(), >>>>>>> uploadFile, http.getMultipartName(), http.getAuthentication(), http.getUseSystemProperties(), <<<<<<< step.getAuthentication(), uploadFile, step.getMultipartName(), ======= step.getAuthentication(), step.getUseSystemProperties(), >>>>>>> uploadFile, step.getMultipartName(), step.getAuthentication(), step.getUseSystemProperties(), <<<<<<< String authentication, FilePath uploadFile, String multipartName, ======= String authentication, boolean useSystemProperties, >>>>>>> FilePath uploadFile, String multipartName, String authentication, boolean useSystemProperties, <<<<<<< this.uploadFile = uploadFile; this.multipartName = multipartName; ======= this.useSystemProperties = useSystemProperties; >>>>>>> this.uploadFile = uploadFile; this.multipartName = multipartName; this.useSystemProperties = useSystemProperties;
<<<<<<< ======= setTypeface(TypefaceCache.getTypeface(context, TypefaceCache.TYPEFACE_NAME_ROBOTO_REGULAR)); setLinkTokenizer(); >>>>>>> setLinkTokenizer(); <<<<<<< ======= setTypeface(TypefaceCache.getTypeface(context, TypefaceCache.TYPEFACE_NAME_ROBOTO_REGULAR)); setLinkTokenizer(); >>>>>>> setLinkTokenizer(); <<<<<<< ======= setTypeface(TypefaceCache.getTypeface(context, TypefaceCache.TYPEFACE_NAME_ROBOTO_REGULAR)); setLinkTokenizer(); >>>>>>> setLinkTokenizer();
<<<<<<< import androidx.fragment.app.Fragment; ======= import androidx.appcompat.widget.SearchView; >>>>>>> import androidx.appcompat.widget.SearchView; import androidx.fragment.app.Fragment; <<<<<<< public class TagsListFragment extends Fragment implements Bucket.Listener<Tag> { ======= import static com.automattic.simplenote.models.Tag.NAME_PROPERTY; public class TagsListFragment extends Fragment implements ActionMode.Callback, Bucket.Listener<Tag> { private ActionMode mActionMode; private Bucket<Tag> mTagsBucket; >>>>>>> import static com.automattic.simplenote.models.Tag.NAME_PROPERTY; public class TagsListFragment extends Fragment implements Bucket.Listener<Tag> { <<<<<<< private Bucket<Tag> mTagsBucket; ======= private EmptyViewRecyclerView mTagsList; private ImageView mEmptyViewImage; private MenuItem mSearchMenuItem; private String mSearchQuery; >>>>>>> private Bucket<Tag> mTagsBucket; private EmptyViewRecyclerView mTagsList; private ImageView mEmptyViewImage; private MenuItem mSearchMenuItem; private String mSearchQuery; <<<<<<< EmptyViewRecyclerView recyclerView = requireActivity().findViewById(R.id.list); ======= mTagsList = getActivity().findViewById(R.id.list); >>>>>>> mTagsList = requireActivity().findViewById(R.id.list); <<<<<<< recyclerView.setAdapter(mTagsAdapter); recyclerView.setLayoutManager(new LinearLayoutManager(getActivity())); View emptyView = requireActivity().findViewById(R.id.empty); ImageView emptyViewImage = emptyView.findViewById(R.id.image); emptyViewImage.setImageResource(R.drawable.ic_tag_24dp); TextView emptyViewText = emptyView.findViewById(R.id.text); emptyViewText.setText(R.string.empty_tags); recyclerView.setEmptyView(emptyView); ======= mTagsList.setAdapter(mTagsAdapter); mTagsList.setLayoutManager(new LinearLayoutManager(getActivity())); View emptyView = getActivity().findViewById(R.id.empty); mEmptyViewImage = emptyView.findViewById(R.id.image); mEmptyViewText = emptyView.findViewById(R.id.text); checkEmptyList(); mTagsList.setEmptyView(emptyView); >>>>>>> mTagsList.setAdapter(mTagsAdapter); mTagsList.setLayoutManager(new LinearLayoutManager(requireActivity())); View emptyView = requireActivity().findViewById(R.id.empty); mEmptyViewImage = emptyView.findViewById(R.id.image); mEmptyViewText = emptyView.findViewById(R.id.text); checkEmptyList(); mTagsList.setEmptyView(emptyView); <<<<<<< ======= protected void refreshTagsSearch() { Query<Tag> tags = Tag.all(mTagsBucket) .where(NAME_PROPERTY, Query.ComparisonType.LIKE, "%" + mSearchQuery + "%") .orderByKey().include(Tag.NOTE_COUNT_INDEX_NAME) .reorder(); Bucket.ObjectCursor<Tag> cursor = tags.execute(); mTagsAdapter.swapCursor(cursor); } private void setEmptyListImage(@DrawableRes int image) { if (mEmptyViewImage != null) { if (image != -1) { mEmptyViewImage.setVisibility(View.VISIBLE); mEmptyViewImage.setImageResource(image); } else { mEmptyViewImage.setVisibility(View.GONE); } } } private void setEmptyListMessage(String message) { if (mEmptyViewText != null && message != null) { mEmptyViewText.setText(message); } } // TODO: Finish bulk editing @Override public boolean onCreateActionMode(ActionMode actionMode, Menu menu) { MenuInflater inflater = actionMode.getMenuInflater(); inflater.inflate(R.menu.bulk_edit, menu); mActionMode = actionMode; return true; } @Override public boolean onPrepareActionMode(ActionMode actionMode, Menu menu) { return false; } @Override public boolean onActionItemClicked(ActionMode actionMode, MenuItem menuItem) { if (menuItem.getItemId() == R.id.menu_trash) { 
actionMode.finish(); // Action picked, so close the CAB return true; } return false; } @Override public void onDestroyActionMode(ActionMode actionMode) { mActionMode = null; } >>>>>>> protected void refreshTagsSearch() { Query<Tag> tags = Tag.all(mTagsBucket) .where(NAME_PROPERTY, Query.ComparisonType.LIKE, "%" + mSearchQuery + "%") .orderByKey().include(Tag.NOTE_COUNT_INDEX_NAME) .reorder(); Bucket.ObjectCursor<Tag> cursor = tags.execute(); mTagsAdapter.swapCursor(cursor); } private void setEmptyListImage(@DrawableRes int image) { if (mEmptyViewImage != null) { if (image != -1) { mEmptyViewImage.setVisibility(View.VISIBLE); mEmptyViewImage.setImageResource(image); } else { mEmptyViewImage.setVisibility(View.GONE); } } } private void setEmptyListMessage(String message) { if (mEmptyViewText != null && message != null) { mEmptyViewText.setText(message); } }
<<<<<<< import com.actionbarsherlock.view.MenuItem; import com.automattic.simplenote.models.Note; import com.simperium.client.Bucket; ======= import com.simperium.client.*; import com.automattic.simplenote.models.*; import com.actionbarsherlock.view.MenuItem; >>>>>>> import com.actionbarsherlock.view.MenuItem; import com.automattic.simplenote.models.Note; import com.simperium.client.Bucket; import com.simperium.client.*;
<<<<<<< Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); EasyTracker.getInstance().activityStart(this); mTracker = EasyTracker.getTracker(); ======= mTracker = currentApp.getTracker(); >>>>>>> Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); mTracker = currentApp.getTracker(); <<<<<<< ======= case R.id.menu_create_note: getNoteListFragment().addNote(); mTracker.send( new HitBuilders.EventBuilder() .setCategory("note") .setAction("create_note") .setLabel("action_bar_button") .build() ); return true; >>>>>>> case R.id.menu_create_note: getNoteListFragment().addNote(); mTracker.send( new HitBuilders.EventBuilder() .setCategory("note") .setAction("create_note") .setLabel("action_bar_button") .build() ); return true;
<<<<<<< import android.support.design.widget.Snackbar; ======= import android.support.v4.app.Fragment; >>>>>>> import android.support.design.widget.Snackbar; import android.support.v4.app.Fragment; <<<<<<< ======= import com.commonsware.cwac.anddown.AndDown; import com.kennyc.bottomsheet.BottomSheet; import com.kennyc.bottomsheet.BottomSheetListener; >>>>>>> import com.commonsware.cwac.anddown.AndDown; <<<<<<< private static final int PUBLISH_TIMEOUT = 20000; private static final int HISTORY_TIMEOUT = 10000; ======= public static final int THEME_LIGHT = 0; public static final int THEME_DARK = 1; >>>>>>> private static final int PUBLISH_TIMEOUT = 20000; private static final int HISTORY_TIMEOUT = 10000; public static final int THEME_LIGHT = 0; public static final int THEME_DARK = 1; <<<<<<< private boolean mIsNewNote, mIsLoadingNote; ======= private boolean mIsNewNote, mIsLoadingNote, mDidTapHistoryButton, mIsMarkdownEnabled, mIsMarkdownEnabledGlobal; >>>>>>> private boolean mIsNewNote, mIsLoadingNote, mIsMarkdownEnabled, mIsMarkdownEnabledGlobal; <<<<<<< private String mLastContentString; private HistoryBottomSheetDialog mHistoryBottomSheet; private InfoBottomSheetDialog mInfoBottomSheet; private ShareBottomSheetDialog mShareBottomSheet; private Snackbar mPublishingSnackbar; ======= private ArrayList<Note> mNoteRevisionsList; private NoteMarkdownFragment mNoteMarkdownFragment; private String mCss; private WebView mMarkdown; >>>>>>> private String mLastContentString; private HistoryBottomSheetDialog mHistoryBottomSheet; private InfoBottomSheetDialog mInfoBottomSheet; private ShareBottomSheetDialog mShareBottomSheet; private Snackbar mPublishingSnackbar; private NoteMarkdownFragment mNoteMarkdownFragment; private String mCss; private WebView mMarkdown; <<<<<<< getActivity().invalidateOptionsMenu(); ======= mMarkdown = (WebView) rootView.findViewById(R.id.markdown); switch (PrefUtils.getIntPref(getActivity(), PrefUtils.PREF_THEME, THEME_LIGHT)) { case THEME_DARK: mCss = "<link rel=\"stylesheet\" type=\"text/css\" href=\"dark.css\" />"; break; case THEME_LIGHT: mCss = "<link rel=\"stylesheet\" type=\"text/css\" href=\"light.css\" />"; break; } >>>>>>> getActivity().invalidateOptionsMenu(); mMarkdown = (WebView) rootView.findViewById(R.id.markdown); switch (PrefUtils.getIntPref(getActivity(), PrefUtils.PREF_THEME, THEME_LIGHT)) { case THEME_DARK: mCss = "<link rel=\"stylesheet\" type=\"text/css\" href=\"dark.css\" />"; break; case THEME_LIGHT: mCss = "<link rel=\"stylesheet\" type=\"text/css\" href=\"light.css\" />"; break; } <<<<<<< private void saveNote() { if (mNote == null || (mHistoryBottomSheet != null && mHistoryBottomSheet.isShowing())) { ======= protected void saveNote() { if (mNote == null || (mBottomSheet != null && mBottomSheet.isShowing())) { >>>>>>> protected void saveNote() { if (mNote == null || (mHistoryBottomSheet != null && mHistoryBottomSheet.isShowing())) { <<<<<<< if (mNote.hasChanges(content, tagString.trim(), mNote.isPinned())) { ======= if (mNote.hasChanges(content, tagString.trim(), mPinButton.isChecked(), mIsMarkdownEnabled)) { >>>>>>> if (mNote.hasChanges(content, tagString.trim(), mNote.isPinned(), mIsMarkdownEnabled)) {
<<<<<<< ======= import android.widget.CompoundButton; import android.widget.FrameLayout; import android.widget.ImageButton; >>>>>>> import android.widget.FrameLayout; <<<<<<< ======= import com.automattic.simplenote.utils.PrefUtils; import com.google.android.material.bottomsheet.BottomSheetBehavior; import com.google.android.material.bottomsheet.BottomSheetDialog; >>>>>>> import com.google.android.material.bottomsheet.BottomSheetBehavior; import com.google.android.material.bottomsheet.BottomSheetDialog; <<<<<<< ======= getDialog().setOnDismissListener(new DialogInterface.OnDismissListener() { @Override public void onDismiss(DialogInterface dialog) { mListener.onInfoDismissed(); } }); // Set peek height to full height of view (i.e. set STATE_EXPANDED) to avoid buttons // being off screen when bottom sheet is shown. getDialog().setOnShowListener(new DialogInterface.OnShowListener() { @Override public void onShow(DialogInterface dialogInterface) { BottomSheetDialog bottomSheetDialog = (BottomSheetDialog) dialogInterface; FrameLayout bottomSheet = bottomSheetDialog.findViewById(com.google.android.material.R.id.design_bottom_sheet); if (bottomSheet != null) { BottomSheetBehavior behavior = BottomSheetBehavior.from(bottomSheet); behavior.setState(BottomSheetBehavior.STATE_EXPANDED); behavior.setSkipCollapsed(true); } } }); >>>>>>> // Set peek height to full height of view (i.e. set STATE_EXPANDED) to avoid buttons // being off screen when bottom sheet is shown. getDialog().setOnShowListener(new DialogInterface.OnShowListener() { @Override public void onShow(DialogInterface dialogInterface) { BottomSheetDialog bottomSheetDialog = (BottomSheetDialog) dialogInterface; FrameLayout bottomSheet = bottomSheetDialog.findViewById(com.google.android.material.R.id.design_bottom_sheet); if (bottomSheet != null) { BottomSheetBehavior behavior = BottomSheetBehavior.from(bottomSheet); behavior.setState(BottomSheetBehavior.STATE_EXPANDED); behavior.setSkipCollapsed(true); } } });
<<<<<<< import android.support.annotation.NonNull; ======= import android.support.annotation.NonNull; import android.support.v4.app.ActivityCompat; import android.support.v4.app.ActivityOptionsCompat; >>>>>>> import android.support.annotation.NonNull; import android.support.v4.app.ActivityCompat; import android.support.v4.app.ActivityOptionsCompat; <<<<<<< private int mTitleFontSize; private int mPreviewFontSize; ======= private Tracker mTracker; >>>>>>> private int mTitleFontSize; private int mPreviewFontSize; private Tracker mTracker; <<<<<<< ======= @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); Simplenote application = (Simplenote) getActivity().getApplication(); mTracker = application.getTracker(); } >>>>>>> @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); Simplenote application = (Simplenote) getActivity().getApplication(); mTracker = application.getTracker(); } <<<<<<< public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return inflater.inflate(R.layout.notes_list, container, false); ======= public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return inflater.inflate(R.layout.fragment_notes_list, container, false); >>>>>>> public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return inflater.inflate(R.layout.fragment_notes_list, container, false);
<<<<<<< public class Simplenote extends Application { public static final String DELETED_NOTE_ID = "deletedNoteId"; public static final String SELECTED_NOTE_ID = "selectedNoteId"; public static final String TAG = "Simplenote"; public static final int INTENT_EDIT_NOTE = 2; public static final int INTENT_PREFERENCES = 1; public static final int ONE_MINUTE_MILLIS = 60 * 1000; // 60 seconds public static final int TEN_SECONDS_MILLIS = 10 * 1000; // 10 seconds private static final String AUTH_PROVIDER = "simplenote.com"; private static final String TAG_SYNC = "sync"; private static Bucket<Preferences> mPreferencesBucket; ======= public class Simplenote extends Application implements HeartbeatListener { public static final String DELETED_NOTE_ID = "deletedNoteId"; public static final String SELECTED_NOTE_ID = "selectedNoteId"; public static final String TAG = "Simplenote"; public static final int INTENT_EDIT_NOTE = 2; public static final int INTENT_PREFERENCES = 1; private static final String AUTH_PROVIDER = "simplenote.com"; private static final int TEN_SECONDS_MILLIS = 10000; private static final long HEARTBEAT_TIMEOUT = WebSocketManager.HEARTBEAT_INTERVAL * 2; private static Bucket<Preferences> mPreferencesBucket; >>>>>>> public class Simplenote extends Application implements HeartbeatListener { public static final String DELETED_NOTE_ID = "deletedNoteId"; public static final String SELECTED_NOTE_ID = "selectedNoteId"; public static final String TAG = "Simplenote"; public static final int INTENT_EDIT_NOTE = 2; public static final int INTENT_PREFERENCES = 1; public static final int ONE_MINUTE_MILLIS = 60 * 1000; // 60 seconds public static final int TEN_SECONDS_MILLIS = 10 * 1000; // 10 seconds private static final String AUTH_PROVIDER = "simplenote.com"; private static final String TAG_SYNC = "sync"; private static final long HEARTBEAT_TIMEOUT = WebSocketManager.HEARTBEAT_INTERVAL * 2; private static Bucket<Preferences> mPreferencesBucket; <<<<<<< private Simperium mSimperium; private boolean mIsInBackground = true; ======= private Handler mHeartbeatHandler; private Runnable mHeartbeatRunnable; private Simperium mSimperium; >>>>>>> private Handler mHeartbeatHandler; private Runnable mHeartbeatRunnable; private Simperium mSimperium; private boolean mIsInBackground = true;
<<<<<<< public void think(ChipState chip) { chip.title("RECIEVER"); String id = chip.text().getLine3(); if (!id.isEmpty()) { Boolean out = MC1110.airwaves.get(id); if (out == null) { chip.out(1).set(false); } else { chip.out(1).set(out); } } else { chip.out(1).set(false); } } ======= /** * Get the title of the IC. * * @return */ public String getTitle() { return "RECEIVER"; } /** * Think. * * @param chip */ public void think(ChipState chip) { if (chip.getIn(1).is()) { String id = chip.getText().getLine3(); if (!id.isEmpty()) { Boolean out = MC1110.airwaves.get(id); if (out == null) { chip.getOut(1).set(false); } else { chip.getOut(1).set(out); } } else { chip.getOut(1).set(false); } } } >>>>>>> /** * Get the title of the IC. * * @return */ public String getTitle() { return "RECEIVER"; } /** * Think. * * @param chip */ public void think(ChipState chip) { String id = chip.getText().getLine3(); if (!id.isEmpty()) { Boolean out = MC1110.airwaves.get(id); if (out == null) { chip.getOut(1).set(false); } else { chip.getOut(1).set(out); } } else { chip.getOut(1).set(false); } }
<<<<<<< ======= private String[] generateICText(Player p) { ArrayList<String> icNameList = new ArrayList<String>(); icNameList.addAll(icList.keySet()); Collections.sort(icNameList); ArrayList<String> strings = new ArrayList<String>(); for(String ic:icNameList) { RegisteredIC ric = icList.get(ic); boolean canUse = canCreateIC(p,ic,ric); boolean instant = ric.type.isInstantIC; if(listUnusuableICs) strings.add(Colors.Rose +ic+" ("+ric.type.name+")"+ (instant?" (INSTANT)":"")+": "+ric.ic.getTitle()+(canUse?"":" (RESTRICTED)")); else if(canUse) strings.add(Colors.Rose +ic+" ("+ric.type.name+")"+ (instant?" (INSTANT)":"")+": "+ric.ic.getTitle()); } return strings.toArray(new String[0]); } >>>>>>>
<<<<<<< public final static String METHODSTRING_RECONNECTWIFIAP = "ReconnectWifiAP"; public final static String METHODSTRING_ISBLESUPPORTED = "IsBLESupported"; ======= public final static String METHODSTRING_ISBLESUPPORTED = "IsBLESupported"; >>>>>>> public final static String METHODSTRING_RECONNECTWIFIAP = "ReconnectWifiAP"; public final static String METHODSTRING_ISBLESUPPORTED = "IsBLESupported"; <<<<<<< <<<<<<< HEAD <<<<<<< HEAD ======= ======= >>>>>>> Resolved rebase merge conflicts jxcore.RegisterMethod(METHODSTRING_RECONNECTWIFIAP, new JXcoreCallback() { @Override public void Receiver(ArrayList<Object> params, String callbackId) { ArrayList<Object> args = new ArrayList<Object>(); WifiManager wifiManager = (WifiManager) jxcore.activity.getBaseContext().getSystemService(Context.WIFI_SERVICE); if (wifiManager.reconnect()) { wifiManager.disconnect(); if (!wifiManager.reconnect()) { args.add("reconnect returned false"); jxcore.CallJSMethod(callbackId, args.toArray()); return; } } //all is well, so lets return null as first argument args.add(null); jxcore.CallJSMethod(callbackId, args.toArray()); } }); >>>>>>> Commit to #345: Renamed classes, cleaned up the code and improved logging. ======= //Jukka's stuff final BtConnectorHelper mBtConnectorHelper = new BtConnectorHelper(); >>>>>>> jxcore.RegisterMethod(METHODSTRING_RECONNECTWIFIAP, new JXcoreCallback() { @Override public void Receiver(ArrayList<Object> params, String callbackId) { ArrayList<Object> args = new ArrayList<Object>(); WifiManager wifiManager = (WifiManager) jxcore.activity.getBaseContext().getSystemService(Context.WIFI_SERVICE); if (wifiManager.reconnect()) { wifiManager.disconnect(); if (!wifiManager.reconnect()) { args.add("reconnect returned false"); jxcore.CallJSMethod(callbackId, args.toArray()); return; } } //all is well, so lets return null as first argument args.add(null); jxcore.CallJSMethod(callbackId, args.toArray()); } });
<<<<<<< import org.junit.runner.notification.Failure; import org.thaliproject.p2p.btconnectorlib.PeerProperties; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; ======= import org.thaliproject.p2p.btconnectorlib.PeerProperties; >>>>>>> import org.junit.runner.notification.Failure; import org.thaliproject.p2p.btconnectorlib.PeerProperties; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; <<<<<<< ======= } static String TAG = "RegisterExecuteUT"; private static void FireTestedMethod(String methodName) { ConnectionHelperTest.mConnectionHelper = new ConnectionHelper(); switch (methodName) { case "onPeerLost": ConnectionHelperTest.mConnectionHelper .onPeerLost(new PeerProperties("11:22:33:22:11:00")); break; case "onPeerDiscovered": ConnectionHelperTest.mConnectionHelper .onPeerDiscovered(new PeerProperties("33:44:55:44:33:22")); break; default : Log.e(TAG, "Method called in FireTestedMethod doesn't exists!"); break; } >>>>>>> <<<<<<< jxcore.RegisterMethod("TestNativeMethod", new jxcore.JXcoreCallback() { @Override public void Receiver(ArrayList<Object> params, final String callbackId) { String methodToTest = ""; if (params.size() == 0) { Log.e(TAG, "Required parameter (toast message) missing"); } else { methodToTest = params.get(0).toString(); FireTestedMethod(methodToTest); } try { Thread.sleep(2000); } catch (InterruptedException e) { e.printStackTrace(); } JSONObject jsonObject = new JSONObject(); try { jsonObject.put("Testing_", methodToTest); } catch (JSONException e) { e.printStackTrace(); } final String jsonObjectAsString = jsonObject.toString(); jxcore.CallJSMethod(callbackId, jsonObjectAsString); } }); ======= jxcore.RegisterMethod("testNativeMethod", new jxcore.JXcoreCallback() { @Override public void Receiver(ArrayList<Object> params, final String callbackId) { String methodToTest = ""; if (params.size() == 0) { Log.e(TAG, "Required parameter is missing"); } else { methodToTest = params.get(0).toString(); FireTestedMethod(methodToTest); } JSONObject jsonObject = new JSONObject(); try { jsonObject.put("Testing_", methodToTest); } catch (JSONException e) { e.printStackTrace(); } final String jsonObjectAsString = jsonObject.toString(); jxcore.CallJSMethod(callbackId, jsonObjectAsString); } }); >>>>>>> jxcore.RegisterMethod("testNativeMethod", new jxcore.JXcoreCallback() { @Override public void Receiver(ArrayList<Object> params, final String callbackId) { String methodToTest = ""; if (params.size() == 0) { Log.e(TAG, "Required parameter is missing"); } else { methodToTest = params.get(0).toString(); FireTestedMethod(methodToTest); } JSONObject jsonObject = new JSONObject(); try { jsonObject.put("Testing_", methodToTest); } catch (JSONException e) { e.printStackTrace(); } final String jsonObjectAsString = jsonObject.toString(); jxcore.CallJSMethod(callbackId, jsonObjectAsString); } }); <<<<<<< ConnectionHelperTest.mConnectionHelper = new ConnectionHelper(); String logtag = "executeNativeTests"; ======= ConnectionHelperTest.mConnectionHelper = new ConnectionHelper(); String logtag = "ExecuteNativeTests"; >>>>>>> ConnectionHelperTest.mConnectionHelper = new ConnectionHelper(); String logtag = "ExecuteNativeTests";
<<<<<<< import android.util.Log; import com.test.thalitest.ThaliTestRunner; ======= import android.bluetooth.BluetoothSocket; import android.system.ErrnoException; import android.system.OsConstants; import android.util.Log; import com.test.thalitest.ThaliTestRunner; >>>>>>> import android.bluetooth.BluetoothSocket; import android.system.ErrnoException; import android.system.OsConstants; import android.util.Log; import com.test.thalitest.ThaliTestRunner; <<<<<<< import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; ======= import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; >>>>>>> import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; <<<<<<< static String mTag = OutgoingSocketThreadTest.class.getName(); private ByteArrayOutputStream outgoingOutputStream; private OutgoingSocketThreadMock mOutgoingSocketThread; private String textOutgoing = "Nullam in massa. Vivamus elit odio, in neque ut congue quis, " + "venenatis placerat, nulla ornare suscipit, erat urna, pellentesque dapibus vel, " + "lorem. Sed egestas non, dolor. Aliquam hendrerit sollicitudin sed."; private ByteArrayOutputStream incomingOutputStream; private IncomingSocketThreadMock mIncomingSocketThread; private ExecutorService mExecutor; private CountDownLatch copyingFinishedLatch; @Rule public TestRule watcher = new TestWatcher() { protected void starting(Description description) { Log.i(mTag, "Starting test: " + description.getMethodName()); } }; public static final int TEST_PORT_NUMBER = 57775; ======= static String mTag = OutgoingSocketThreadTest.class.getName(); private ByteArrayOutputStream outgoingOutputStream; private ListenerMock mListenerMockOutgoing; private InputStreamMock mInputStreamMockOutgoing; private OutputStreamMockOutgoing mOutputStreamMockOutgoing; private OutgoingSocketThreadMock mOutgoingSocketThread; private String textOutgoing = "Nullam in massa. Vivamus elit odio, in neque ut congue quis, " + "venenatis placerat, nulla ornare suscipit, erat urna, pellentesque dapibus vel, " + "lorem. Sed egestas non, dolor. Aliquam hendrerit sollicitudin sed."; final int testPortNumber = 57775; private ByteArrayOutputStream incomingOutputStream; private ListenerMock mListenerMockIncoming; private InputStreamMock mInputStreamMockIncoming; private OutputStreamMockIncoming mOutputStreamMockIncoming; private IncomingSocketThreadMock mIncomingSocketThread; private String textIncoming = "Lorem ipsum dolor sit amet elit nibh, imperdiet dignissim, " + "imperdiet wisi. Morbi vel risus. Nunc molestie placerat, nulla mi, id nulla ornare " + "risus. 
Sed lacinia, urna eros lacus, elementum eu."; ExecutorService mExecutor; @Rule public TestRule watcher = new TestWatcher() { protected void starting(Description description) { Log.i(mTag, "Starting test: " + description.getMethodName()); } }; >>>>>>> static String mTag = OutgoingSocketThreadTest.class.getName(); private ByteArrayOutputStream outgoingOutputStream; private ListenerMock mListenerMockOutgoing; private InputStreamMock mInputStreamMockOutgoing; private OutputStreamMockOutgoing mOutputStreamMockOutgoing; private OutgoingSocketThreadMock mOutgoingSocketThread; private CountDownLatch copyingFinishedLatch; private String textOutgoing = "Nullam in massa. Vivamus elit odio, in neque ut congue quis, " + "venenatis placerat, nulla ornare suscipit, erat urna, pellentesque dapibus vel, " + "lorem. Sed egestas non, dolor. Aliquam hendrerit sollicitudin sed."; final int testPortNumber = 57775; private ByteArrayOutputStream incomingOutputStream; private ListenerMock mListenerMockIncoming; private InputStreamMock mInputStreamMockIncoming; private OutputStreamMockIncoming mOutputStreamMockIncoming; private IncomingSocketThreadMock mIncomingSocketThread; private String textIncoming = "Lorem ipsum dolor sit amet elit nibh, imperdiet dignissim, " + "imperdiet wisi. Morbi vel risus. Nunc molestie placerat, nulla mi, id nulla ornare " + "risus. Sed lacinia, urna eros lacus, elementum eu."; ExecutorService mExecutor; @Rule public TestRule watcher = new TestWatcher() { protected void starting(Description description) { Log.i(mTag, "Starting test: " + description.getMethodName()); } }; <<<<<<< new OutgoingSocketThreadMock(null, mListenerMockOutgoing, inputStreamMockOutgoing, outputStreamMockOutgoing); } ======= new OutgoingSocketThreadMock(null, mListenerMockOutgoing, mInputStreamMockOutgoing, mOutputStreamMockOutgoing); >>>>>>> new OutgoingSocketThreadMock(null, mListenerMockOutgoing, mInputStreamMockOutgoing, mOutputStreamMockOutgoing); }
<<<<<<< import org.geowebcache.config.BlobStoreInfo; ======= import org.geowebcache.config.BlobStoreConfig; import org.geowebcache.config.Info; >>>>>>> import org.geowebcache.config.BlobStoreInfo; import org.geowebcache.config.Info;
<<<<<<< InvocationAssistance invocationAssistance = new InvocationAssistanceBuilder().withIsScriptSrc(true).withIsCompiledSrc(true).build(); CoverityPublisher publisher = new CoverityPublisherBuilder().withInvocationAssistance(invocationAssistance).build(); Command covBuildCommand = new CovBuildCommand(build, launcher, listener, publisher, StringUtils.EMPTY, envVars); ======= InvocationAssistance invocationAssistance = new InvocationAssistance( false, StringUtils.EMPTY, false, StringUtils.EMPTY, true, true, StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, false, StringUtils.EMPTY, StringUtils.EMPTY, null, false, false, StringUtils.EMPTY, StringUtils.EMPTY, null, false ); CoverityPublisher publisher = new CoverityPublisher( null, invocationAssistance, false, false, false, false, false, null, null ); Command covBuildCommand = new CovBuildCommand(build, launcher, listener, publisher, StringUtils.EMPTY, envVars, true); >>>>>>> InvocationAssistance invocationAssistance = new InvocationAssistanceBuilder().withIsScriptSrc(true).withIsCompiledSrc(true).build(); CoverityPublisher publisher = new CoverityPublisherBuilder().withInvocationAssistance(invocationAssistance).build(); Command covBuildCommand = new CovBuildCommand(build, launcher, listener, publisher, StringUtils.EMPTY, envVars, true);
<<<<<<< private final String javaWarFile; ======= private final String csharpMsvscaOutputFiles; private final boolean csharpAutomaticAssemblies; private final boolean csharpMsvsca; >>>>>>> private final String javaWarFile; private final String csharpMsvscaOutputFiles; private final boolean csharpAutomaticAssemblies; private final boolean csharpMsvsca; <<<<<<< public InvocationAssistance(String buildArguments, String analyzeArguments, String commitArguments, String intermediateDir, String csharpAssemblies, String javaWarFile, String saOverride, String covBuildBlacklist) { ======= public InvocationAssistance(String buildArguments, String analyzeArguments, String commitArguments, String intermediateDir, String csharpAssemblies, String csharpMsvscaOutputFiles, boolean csharpMsvsca, boolean csharpAutomaticAssemblies, String saOverride, String covBuildBlacklist) { >>>>>>> public InvocationAssistance(String buildArguments, String analyzeArguments, String commitArguments, String intermediateDir, String csharpAssemblies, String javaWarFile, String csharpMsvscaOutputFiles, boolean csharpAutomaticAssemblies, boolean csharpMsvsca, String saOverride, String covBuildBlacklist) { <<<<<<< this.javaWarFile = Util.fixEmpty(javaWarFile); ======= this.csharpMsvscaOutputFiles = Util.fixEmpty(csharpMsvscaOutputFiles); this.csharpMsvsca = csharpMsvsca; this.csharpAutomaticAssemblies = csharpAutomaticAssemblies; >>>>>>> this.javaWarFile = Util.fixEmpty(javaWarFile); this.csharpMsvscaOutputFiles = Util.fixEmpty(csharpMsvscaOutputFiles); this.csharpMsvsca = csharpMsvsca; this.csharpAutomaticAssemblies = csharpAutomaticAssemblies; <<<<<<< public String getJavaWarFile() { return javaWarFile; } ======= public String getCsharpMsvscaOutputFiles() { return csharpMsvscaOutputFiles; } public boolean getCsharpMsvsca() { return csharpMsvsca; } public boolean getCsharpAutomaticAssemblies() { return csharpAutomaticAssemblies; } >>>>>>> public String getJavaWarFile() { return javaWarFile; } public String getCsharpMsvscaOutputFiles() { return csharpMsvscaOutputFiles; } public boolean getCsharpMsvsca() { return csharpMsvsca; } public boolean getCsharpAutomaticAssemblies() { return csharpAutomaticAssemblies; } <<<<<<< if(javaWarFile != null ? !javaWarFile.equals(that.javaWarFile) : that.javaWarFile != null) return false; ======= if(csharpMsvscaOutputFiles != null ? !csharpMsvscaOutputFiles.equals(that.csharpMsvscaOutputFiles) : that.csharpMsvscaOutputFiles != null) return false; if(csharpAutomaticAssemblies != that.csharpAutomaticAssemblies) return false; if(csharpMsvsca != that.csharpMsvsca) return false; >>>>>>> if(csharpMsvscaOutputFiles != null ? !csharpMsvscaOutputFiles.equals(that.csharpMsvscaOutputFiles) : that.csharpMsvscaOutputFiles != null) return false; if(intermediateDir != null ? !intermediateDir.equals(that.intermediateDir) : that.intermediateDir != null) return false; if(javaWarFile != null ? !javaWarFile.equals(that.javaWarFile) : that.javaWarFile != null) return false; <<<<<<< result = 31 * result + (javaWarFile != null ? javaWarFile.hashCode() : 0); ======= result = 31 * result + (csharpMsvscaOutputFiles != null ? csharpMsvscaOutputFiles.hashCode() : 0); result = 31 * result + (csharpAutomaticAssemblies ? 1 : 0); result = 31 * result + (csharpMsvsca ? 1 : 0); >>>>>>> result = 31 * result + (javaWarFile != null ? javaWarFile.hashCode() : 0); result = 31 * result + (csharpMsvscaOutputFiles != null ? csharpMsvscaOutputFiles.hashCode() : 0); result = 31 * result + (csharpAutomaticAssemblies ? 
1 : 0); result = 31 * result + (csharpMsvsca ? 1 : 0);
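The record above merges the javaWarFile field and the new C# fields into the same equals/hashCode pair. As a minimal sketch of the combined contract — the class name InvocationFields is invented and the field set is trimmed to the ones visible in this record — the usual approach is to cover the same fields in both methods via java.util.Objects:

    import java.util.Objects;

    // Hypothetical reduced class; the real InvocationAssistance carries many more fields.
    final class InvocationFields {
        final String javaWarFile;
        final String csharpMsvscaOutputFiles;
        final boolean csharpMsvsca;
        final boolean csharpAutomaticAssemblies;

        InvocationFields(String javaWarFile, String csharpMsvscaOutputFiles,
                         boolean csharpMsvsca, boolean csharpAutomaticAssemblies) {
            this.javaWarFile = javaWarFile;
            this.csharpMsvscaOutputFiles = csharpMsvscaOutputFiles;
            this.csharpMsvsca = csharpMsvsca;
            this.csharpAutomaticAssemblies = csharpAutomaticAssemblies;
        }

        @Override public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof InvocationFields)) return false;
            InvocationFields that = (InvocationFields) o;
            return csharpMsvsca == that.csharpMsvsca
                    && csharpAutomaticAssemblies == that.csharpAutomaticAssemblies
                    && Objects.equals(javaWarFile, that.javaWarFile)
                    && Objects.equals(csharpMsvscaOutputFiles, that.csharpMsvscaOutputFiles);
        }

        @Override public int hashCode() {
            // Same field set as equals(), keeping the two methods consistent.
            return Objects.hash(javaWarFile, csharpMsvscaOutputFiles, csharpMsvsca, csharpAutomaticAssemblies);
        }
    }

Keeping equals() aligned with the field set that hashCode() mixes in (including the two booleans) avoids surprising behaviour when instances are used as map keys or set members.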
<<<<<<< import com.google.openrtb.OpenRtb.BidRequest.Geo.Builder; import com.google.openrtb.OpenRtbNative; ======= >>>>>>> import com.google.openrtb.OpenRtbNative;
<<<<<<< Map<ResourceGroup, List<RequestMappingContext>> resourceGroupRequestMappings = new HashMap<>(); ======= ArrayListMultimap<ResourceGroup, RequestMappingContext> resourceGroupRequestMappings = ArrayListMultimap.create(); int requestMappingContextId = 0; >>>>>>> Map<ResourceGroup, List<RequestMappingContext>> resourceGroupRequestMappings = new HashMap<>(); int requestMappingContextId = 0; <<<<<<< resourceGroupRequestMappings.putIfAbsent(resourceGroup, new ArrayList<>()); resourceGroupRequestMappings.get(resourceGroup).add(requestMappingContext); ======= resourceGroupRequestMappings.put(resourceGroup, requestMappingContext); ++requestMappingContextId; >>>>>>> resourceGroupRequestMappings.putIfAbsent( resourceGroup, new ArrayList<>()); resourceGroupRequestMappings.get(resourceGroup).add(requestMappingContext); ++requestMappingContextId;
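The resolution above replaces Guava's ArrayListMultimap with a plain HashMap of lists, grouped via putIfAbsent followed by get. A small self-contained sketch of the same grouping — with the key and value types simplified to String, so none of these names come from the project — also shows the computeIfAbsent variant that folds the two calls into one:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class GroupingSketch {
        public static void main(String[] args) {
            Map<String, List<String>> byGroup = new HashMap<>();

            // putIfAbsent + get, mirroring the merged resolution:
            byGroup.putIfAbsent("pets", new ArrayList<>());
            byGroup.get("pets").add("GET /pets");

            // computeIfAbsent is the single-call equivalent and only allocates the list when the key is new:
            byGroup.computeIfAbsent("pets", k -> new ArrayList<>()).add("POST /pets");

            System.out.println(byGroup); // {pets=[GET /pets, POST /pets]}
        }
    }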
<<<<<<< public Optional<PersistentEntity<?, ?>> entity() { return entities.getPersistentEntity(resource.getDomainType()); ======= public PersistentEntity<?, ? extends PersistentProperty<?>> entity() { Object domainType = resource.getDomainType(); OptionalDeferencer<Class<?>> converter = new OptionalDeferencer<>(); Class actualDomainType = converter.convert(domainType); return entities.getPersistentEntity(actualDomainType).orElse(null); >>>>>>> public Optional<PersistentEntity<?, ? extends PersistentProperty<?>>> entity() { return entities.getPersistentEntity(resource.getDomainType());
<<<<<<< import com.google.common.collect.Ordering; import com.google.common.primitives.Ints; ======= >>>>>>>
<<<<<<< ======= import com.google.common.base.Function; import com.google.common.base.Objects; import com.google.common.base.Optional; >>>>>>> <<<<<<< this.allowableValues = ofNullable(allowableValues); this.itemModel = ofNullable(itemModel); ======= this.allowableValues = Optional.fromNullable(allowableValues); this.itemModel = Optional.fromNullable(itemModel); this.modelId = Optional.fromNullable(modelId); >>>>>>> this.allowableValues = Optional.ofNullable(allowableValues); this.itemModel = Optional.ofNullable(itemModel); this.modelId = Optional.ofNullable(modelId);
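The record above swaps Guava's Optional.fromNullable for java.util.Optional.ofNullable. A short standalone sketch of the correspondence (nothing here comes from the project; it only needs the JDK):

    import java.util.Optional;

    public class OptionalMigrationSketch {
        public static void main(String[] args) {
            String maybeNull = null;

            // Guava:     Optional.fromNullable(maybeNull)  ->  Optional.absent() when null
            // java.util: Optional.ofNullable(maybeNull)    ->  Optional.empty()  when null
            Optional<String> value = Optional.ofNullable(maybeNull);

            // orElse(...) supplies the default that Guava's or(...) used to provide.
            System.out.println(value.orElse("<none>")); // prints <none>
        }
    }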
<<<<<<< try{ misraConfigFile.createNewFile(); InvocationAssistance invocationAssistance = new InvocationAssistance( false, StringUtils.EMPTY, false, StringUtils.EMPTY, false, false, StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, true, misraConfigFile.getPath(), StringUtils.EMPTY, null, false, false, StringUtils.EMPTY, StringUtils.EMPTY, null, false ); CoverityPublisher publisher = new CoverityPublisher( null, invocationAssistance, false, false, false, false, false, StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, null, null, null ); ICommand covAnalyzeCommand = new CovAnalyzeCommand(build, launcher, listener, publisher, StringUtils.EMPTY, envVars); setExpectedArguments(new String[] {"cov-analyze", "--dir", "TestDir", "--misra-config", misraConfigFile.getPath()}); covAnalyzeCommand.runCommand(); consoleLogger.verifyLastMessage("[Coverity] cov-analyze command line arguments: " + actualArguments.toString()); }finally { misraConfigFile.delete(); } ======= misraConfigFile.createNewFile(); InvocationAssistance invocationAssistance = new InvocationAssistance( false, StringUtils.EMPTY, false, StringUtils.EMPTY, false, false, StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, true, misraConfigFile.getPath(), StringUtils.EMPTY, null, false, false, StringUtils.EMPTY, StringUtils.EMPTY, null, false ); CoverityPublisher publisher = new CoverityPublisher( null, invocationAssistance, false, false, false, false, false, null, null ); CovCommand covAnalyzeCommand = new CovAnalyzeCommand(build, launcher, listener, publisher, StringUtils.EMPTY, envVars); List<String> covAnalyzeArguments = covAnalyzeCommand.getCommandLines(); assertEquals(5, covAnalyzeArguments.size()); checkCommandLineArg(covAnalyzeArguments, "cov-analyze"); checkCommandLineArg(covAnalyzeArguments, "--dir"); checkCommandLineArg(covAnalyzeArguments, "TestDir"); checkCommandLineArg(covAnalyzeArguments, "--misra-config"); checkCommandLineArg(covAnalyzeArguments, misraConfigFile.getPath()); assertEquals(0, covAnalyzeArguments.size()); misraConfigFile.delete(); >>>>>>> try{ misraConfigFile.createNewFile(); InvocationAssistance invocationAssistance = new InvocationAssistance( false, StringUtils.EMPTY, false, StringUtils.EMPTY, false, false, StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, true, misraConfigFile.getPath(), StringUtils.EMPTY, null, false, false, StringUtils.EMPTY, StringUtils.EMPTY, null, false ); CoverityPublisher publisher = new CoverityPublisher( null, invocationAssistance, false, false, false, false, false, null, null ); ICommand covAnalyzeCommand = new CovAnalyzeCommand(build, launcher, listener, publisher, StringUtils.EMPTY, envVars); setExpectedArguments(new String[] {"cov-analyze", "--dir", "TestDir", "--misra-config", misraConfigFile.getPath()}); covAnalyzeCommand.runCommand(); consoleLogger.verifyLastMessage("[Coverity] cov-analyze command line arguments: " + actualArguments.toString()); }finally { misraConfigFile.delete(); } <<<<<<< StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, null, null, null ======= taOptionBlock, null >>>>>>> null, null
<<<<<<< ======= import org.springframework.data.repository.core.RepositoryMetadata; import org.springframework.data.rest.core.mapping.ResourceMapping; import org.springframework.data.rest.core.mapping.ResourceMetadata; import org.springframework.web.bind.annotation.RequestMethod; >>>>>>> <<<<<<< final List<RequestHandler> handlers = new ArrayList<>(); final PersistentProperty<?> property = context.getAssociation().getInverse(); final String propertyIdentifier = propertyIdentifierName(property); final String mappingPath = context.associationMetadata() .map(metadata -> metadata.getMappingFor(property)) .map(mapping -> mapping.getPath()) .map(p -> p.toString()) .orElse(""); final String path = String.format("%s%s/{id}/%s/{%s}", context.getEntityContext().basePath(), context.getEntityContext().resourcePath(), mappingPath, propertyIdentifier); ======= List<RequestHandler> handlers = new ArrayList<>(); ResourceMetadata metadata = context.associationMetadata(); Association<? extends PersistentProperty<?>> association = context.getAssociation(); PersistentProperty<?> property = association.getInverse(); ResourceMapping mapping = metadata.getMappingFor(property); EntityContext entityContext = context.getEntityContext(); PersistentEntity entity = entityContext.entity(); TypeResolver resolver = entityContext.getTypeResolver(); RepositoryMetadata repository = entityContext.getRepositoryMetadata(); >>>>>>> final List<RequestHandler> handlers = new ArrayList<>(); final PersistentProperty<?> property = context.getAssociation().getInverse(); final String propertyIdentifier = propertyIdentifierName(property); final String mappingPath = context.associationMetadata() .map(metadata -> metadata.getMappingFor(property)) .map(mapping -> mapping.getPath()) .map(p -> p.toString()) .orElse(""); final String path = String.format("%s%s/{id}/%s/{%s}", context.getEntityContext().basePath(), context.getEntityContext().resourcePath(), mappingPath, propertyIdentifier); <<<<<<< SpecificationBuilder.getInstance(context, path) .supportsMethod(GET) .consumes(HAL_JSON) .withParameter(ID) .withParameter(ITEM) .build() .map(getPropertyItem -> new SpringDataRestRequestHandler(context.getEntityContext(), getPropertyItem)) .ifPresent(handlers::add); ======= String propertyIdentifier = propertyIdentifierName(property); ActionSpecification getPropertyItem = new ActionSpecification( String.format("%s%s", lowerCamelCaseName(entity.getType().getSimpleName()), upperCamelCaseName(property.getName())), String.format("%s%s/{id}/%s/{%s}", entityContext.basePath(), entityContext.resourcePath(), mapping.getPath(), propertyIdentifier), singleton(RequestMethod.GET), new HashSet<>(), singleton(HAL_JSON), null, Stream.of(new ResolvedMethodParameter( 0, "id", pathAnnotations("id"), resolver.resolve(repository.getIdType())), new ResolvedMethodParameter( 1, propertyIdentifier, pathAnnotations(propertyIdentifier), resolver.resolve(String.class))).collect(toList()), propertyItemResponse(property, resolver)); handlers.add(new SpringDataRestRequestHandler(entityContext, getPropertyItem)); >>>>>>> SpecificationBuilder.getInstance(context, path) .supportsMethod(GET) .consumes(HAL_JSON) .withParameter(ID) .withParameter(ITEM) .build() .map(getPropertyItem -> new SpringDataRestRequestHandler(context.getEntityContext(), getPropertyItem)) .ifPresent(handlers::add);
<<<<<<< ======= import com.fasterxml.classmate.ResolvedType; import com.fasterxml.classmate.TypeResolver; import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.repository.query.Param; import org.springframework.data.rest.core.mapping.MethodResourceMapping; import org.springframework.data.rest.core.mapping.SearchResourceMappings; import org.springframework.hateoas.Resource; import org.springframework.hateoas.Resources; import org.springframework.web.bind.annotation.RequestMethod; >>>>>>> <<<<<<< ======= final List<RequestHandler> handlers = new ArrayList<>(); final PersistentEntity<?, ?> entity = context.entity(); HandlerMethodResolver methodResolver = new HandlerMethodResolver(context.getTypeResolver()); SearchResourceMappings searchMappings = context.searchMappings(); for (MethodResourceMapping mapping : searchMappings.getExportedMappings()) { HandlerMethod handler = new HandlerMethod( new OptionalDeferencer<>().convert(context.getRepositoryInstance()), mapping.getMethod()); ActionSpecification spec = new ActionSpecification( actionName(entity, mapping.getMethod()), String.format("%s%s/search%s", context.basePath(), context.resourcePath(), mapping.getPath()), singleton(RequestMethod.GET), new HashSet<>(), new HashSet<>(), handler, transferResolvedMethodParameterList(methodResolver.methodParameters(handler)), inferReturnType(methodResolver, handler, context.getTypeResolver())); handlers.add(new SpringDataRestRequestHandler(context, spec)); } return handlers; } >>>>>>>
<<<<<<< ======= import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.PersistentProperty; >>>>>>> <<<<<<< final List<RequestHandler> handlers = new ArrayList<>(); ======= final List<RequestHandler> handlers = new ArrayList<>(); final PersistentEntity<?, ?> entity = context.entity(); >>>>>>> final List<RequestHandler> handlers = new ArrayList<>();
<<<<<<< import java.util.Collections; ======= import java.util.Collection; import java.util.LinkedHashMap; >>>>>>> import java.util.Collection; <<<<<<< Multimap<String, ApiListing> apiListingMap = LinkedListMultimap.create(); Map<ResourceGroup, Map<String, Model>> models = apiModelReader.read(context); ======= final Multimap<String, ApiListing> apiListingMap = LinkedListMultimap.create(); >>>>>>> final Multimap<String, ApiListing> apiListingMap = LinkedListMultimap.create(); final Map<ResourceGroup, Map<String, Model>> models = apiModelReader.read(context); <<<<<<< = context.getRequestMappingsByResourceGroup(); ======= = context.getRequestMappingsByResourceGroup(); Collection<ApiDescription> additionalListings = pluginsManager.additionalListings(context); Set<ResourceGroup> allResourceGroups = FluentIterable.from(collectResourceGroups(additionalListings)) .append(requestMappingsByResourceGroup.keySet()) .toSet(); >>>>>>> = context.getRequestMappingsByResourceGroup(); Collection<ApiDescription> additionalListings = pluginsManager.additionalListings(context); Set<ResourceGroup> allResourceGroups = FluentIterable.from(collectResourceGroups(additionalListings)) .append(requestMappingsByResourceGroup.keySet()) .toSet(); <<<<<<< for (RequestMappingContext each : sortedByMethods(requestMappingsByResourceGroup.get(resourceGroup))) { ======= Map<String, Model> models = new LinkedHashMap<String, Model>(); List<RequestMappingContext> requestMappings = nullToEmptyList(requestMappingsByResourceGroup.get(resourceGroup)); for (RequestMappingContext each : sortedByMethods(requestMappings)) { models.putAll(apiModelReader.read(each.withKnownModels(models))); >>>>>>> List<RequestMappingContext> requestMappings = nullToEmptyList(requestMappingsByResourceGroup.get(resourceGroup)); for (RequestMappingContext each : sortedByMethods(requestMappings)) { <<<<<<< .apiVersion(documentationContext.getApiInfo().getVersion()) .basePath(adjuster.adjustedPath(basePath)) .resourcePath(resourcePath) .produces(produces) .consumes(consumes) .host(host) .protocols(protocols) .securityReferences(securityReferences) .apis(sortedApis) .models(models.get(resourceGroup)) .position(position++) .availableTags(documentationContext.getTags()); ======= .apiVersion(documentationContext.getApiInfo().getVersion()) .basePath(adjuster.adjustedPath(basePath)) .resourcePath(resourcePath) .produces(produces) .consumes(consumes) .host(host) .protocols(protocols) .securityReferences(securityReferences) .apis(sortedApis) .models(models) .position(position++) .availableTags(documentationContext.getTags()); >>>>>>> .apiVersion(documentationContext.getApiInfo().getVersion()) .basePath(adjuster.adjustedPath(basePath)) .resourcePath(resourcePath) .produces(produces) .consumes(consumes) .host(host) .protocols(protocols) .securityReferences(securityReferences) .apis(sortedApis) .models(models.get(resourceGroup)) .position(position++) .availableTags(documentationContext.getTags()); <<<<<<< static Optional<String> longestCommonPath(List<ApiDescription> apiDescriptions) { List<String> commons = newArrayList(); if (null == apiDescriptions || apiDescriptions.isEmpty()) { return Optional.absent(); } List<String> firstWords = urlParts(apiDescriptions.get(0)); for (int position = 0; position < firstWords.size(); position++) { String word = firstWords.get(position); boolean allContain = true; for (int i = 1; i < apiDescriptions.size(); i++) { List<String> words = urlParts(apiDescriptions.get(i)); if (words.size() < position + 1 || 
!words.get(position).equals(word)) { allContain = false; break; } } if (allContain) { commons.add(word); } } Joiner joiner = Joiner.on("/").skipNulls(); return Optional.of("/" + joiner.join(commons)); } static List<String> urlParts(ApiDescription apiDescription) { return Splitter.on('/') .omitEmptyStrings() .trimResults() .splitToList(apiDescription.getPath()); } ======= >>>>>>>
<<<<<<< import org.reflections.Reflections; ======= import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.Sets; import io.github.lukehutch.fastclasspathscanner.FastClasspathScanner; >>>>>>> import io.github.lukehutch.fastclasspathscanner.FastClasspathScanner; <<<<<<< import java.util.ArrayList; ======= import java.util.HashSet; >>>>>>> import java.util.ArrayList; import java.util.HashSet; <<<<<<< List<AlternateTypeRule> rules = new ArrayList<>(); Reflections reflections = new Reflections(packagePrefix); Set<Class<?>> serialized = reflections.getTypesAnnotatedWith(JsonSerialize.class); Set<Class<?>> deserialized = reflections.getTypesAnnotatedWith(JsonDeserialize.class); Stream.concat(serialized.stream(), deserialized.stream()).forEachOrdered(type -> { ======= List<AlternateTypeRule> rules = newArrayList(); Set<Class<?>> serialized = new HashSet<>(); Set<Class<?>> deserialized = new HashSet<>(); new FastClasspathScanner(packagePrefix).matchClassesWithAnnotation(JsonSerialize.class, serialized::add) .matchClassesWithAnnotation(JsonDeserialize.class, deserialized::add) .scan(); for (Class<?> type : Sets.union(serialized, deserialized)) { >>>>>>> List<AlternateTypeRule> rules = new ArrayList<>(); Set<Class<?>> serialized = new HashSet<>(); Set<Class<?>> deserialized = new HashSet<>(); new FastClasspathScanner(packagePrefix) .matchClassesWithAnnotation(JsonSerialize.class, serialized::add) .matchClassesWithAnnotation(JsonDeserialize.class, deserialized::add) .scan(); Stream.concat(serialized.stream(), deserialized.stream()) .forEachOrdered(type -> {
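The resolution above feeds the two annotation-scan result sets through Stream.concat rather than Guava's Sets.union. One behavioural detail worth keeping in mind: concat does not deduplicate, so a class carrying both annotations is visited twice unless distinct() is added. A minimal JDK-only illustration with string sets (the set contents are invented):

    import java.util.Set;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class ConcatVsUnionSketch {
        public static void main(String[] args) {
            Set<String> serialized   = Set.of("A", "B");
            Set<String> deserialized = Set.of("B", "C");

            // Stream.concat keeps duplicates: four elements, "B" appears twice.
            long concatCount = Stream.concat(serialized.stream(), deserialized.stream()).count();

            // Adding distinct() restores set semantics, matching what Sets.union would yield.
            Set<String> union = Stream.concat(serialized.stream(), deserialized.stream())
                    .distinct()
                    .collect(Collectors.toSet());

            System.out.println(concatCount + " vs " + union.size()); // 4 vs 3
        }
    }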
<<<<<<< import java.util.Optional; import java.util.function.Function; ======= import java.util.Map; >>>>>>> import java.util.Map; import java.util.Optional; import java.util.function.Function;
<<<<<<< import java.io.*; import java.util.Arrays; import java.util.HashMap; ======= import java.util.*; >>>>>>> import java.io.*; import java.util.*;
<<<<<<< _dinfo = new DataInfo(fr, 1, use_all_factor_levels || lambda_search, standardize,false); _activeData = _dinfo; ======= _dinfo = new DataInfo(fr, 1, use_all_factor_levels || lambda_search, standardize ? DataInfo.TransformType.STANDARDIZE : DataInfo.TransformType.NONE, DataInfo.TransformType.NONE); >>>>>>> _dinfo = new DataInfo(fr, 1, use_all_factor_levels || lambda_search, standardize ? DataInfo.TransformType.STANDARDIZE : DataInfo.TransformType.NONE, DataInfo.TransformType.NONE); _activeData = _dinfo;
<<<<<<< if(effectiveIA.getIsUsingMisra() && (this.version.compareMajor(7) == 0)){ cmd.add("--misra-only"); } else if(effectiveIA.getCommitArguments() != null) { cmd.addAll(EnvParser.tokenize(effectiveIA.getCommitArguments())); ======= if(effectiveIA.getCommitArguments() != null) { for(String arg : effectiveIA.getCommitArguments().split(" ")) { cmd.add(arg); } >>>>>>> if(effectiveIA.getCommitArguments() != null) { cmd.addAll(EnvParser.tokenize(effectiveIA.getCommitArguments()));
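The resolution above keeps EnvParser.tokenize instead of the other branch's bare split(" "). A short JDK-only sketch (the sample argument string is invented, and the comment about tokenize describes what a quote-aware tokenizer is presumably for, not verified plugin behaviour) shows what the naive split gets wrong:

    import java.util.Arrays;

    public class SplitSketch {
        public static void main(String[] args) {
            String commitArgs = "--description \"nightly build\"  --strip-path /src";

            // Naive split: the quoted value is broken apart and the double space yields an empty token.
            System.out.println(Arrays.toString(commitArgs.split(" ")));
            // [--description, "nightly, build", , --strip-path, /src]

            // A quote-aware tokenizer (the role a helper like EnvParser.tokenize plays in the
            // resolution above) would typically produce: [--description, nightly build, --strip-path, /src]
        }
    }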
<<<<<<< FVecTest.makeByteVec(raw, "x,y\n0,0\n1,0.1\n2,0.2\n3,0.3\n4,0.4\n5,0.5\n6,0.6\n7,0.7\n8,0.8\n9,0.9"); Frame fr = ParseDataset2.parse(parsed, new Key[]{raw}); new GLM2("GLM test of gaussian(linear) regression.",model,fr,false,Family.gaussian, Family.gaussian.defaultLink,0,0).fork().get(); GLMModel m = DKV.get(model).get(); HashMap<String, Double> coefs = m.coefficients(); ======= Key k = FVecTest.makeByteVec(raw, "x,y\n0,0\n1,0.1\n2,0.2\n3,0.3\n4,0.4\n5,0.5\n6,0.6\n7,0.7\n8,0.8\n9,0.9"); Frame fr = ParseDataset2.parse(parsed, new Key[]{k}); new GLM2("GLM test of gaussian(linear) regression.",modelKey,fr,false,Family.gaussian, Family.gaussian.defaultLink,0,0).fork().get(); model = DKV.get(modelKey).get(); HashMap<String, Double> coefs = model.coefficients(); >>>>>>> FVecTest.makeByteVec(raw, "x,y\n0,0\n1,0.1\n2,0.2\n3,0.3\n4,0.4\n5,0.5\n6,0.6\n7,0.7\n8,0.8\n9,0.9"); Frame fr = ParseDataset2.parse(parsed, new Key[]{raw}); new GLM2("GLM test of gaussian(linear) regression.",modelKey,fr,false,Family.gaussian, Family.gaussian.defaultLink,0,0).fork().get(); model = DKV.get(modelKey).get(); HashMap<String, Double> coefs = model.coefficients(); <<<<<<< FVecTest.makeByteVec(raw, "x,y\n0,2\n1,4\n2,8\n3,16\n4,32\n5,64\n6,128\n7,256"); Frame fr = ParseDataset2.parse(parsed, new Key[]{raw}); new GLM2("GLM test of poisson regression.",model,fr,false,Family.poisson, Family.poisson.defaultLink,0,0).fork().get(); GLMModel m = DKV.get(model).get(); for(double c:m.beta())assertEquals(Math.log(2),c,1e-4); ======= Key k = FVecTest.makeByteVec(raw, "x,y\n0,2\n1,4\n2,8\n3,16\n4,32\n5,64\n6,128\n7,256"); Frame fr = ParseDataset2.parse(parsed, new Key[]{k}); new GLM2("GLM test of poisson regression.",modelKey,fr,false,Family.poisson, Family.poisson.defaultLink,0,0).fork().get(); model = DKV.get(modelKey).get(); for(double c:model.beta())assertEquals(Math.log(2),c,1e-4); >>>>>>> FVecTest.makeByteVec(raw, "x,y\n0,2\n1,4\n2,8\n3,16\n4,32\n5,64\n6,128\n7,256"); Frame fr = ParseDataset2.parse(parsed, new Key[]{raw}); new GLM2("GLM test of poisson regression.",modelKey,fr,false,Family.poisson, Family.poisson.defaultLink,0,0).fork().get(); model = DKV.get(modelKey).get(); for(double c:model.beta())assertEquals(Math.log(2),c,1e-4); <<<<<<< UKV.remove(raw); FVecTest.makeByteVec(raw, "x,y\n1,0\n2,1\n3,2\n4,3\n5,1\n6,4\n7,9\n8,18\n9,23\n10,31\n11,20\n12,25\n13,37\n14,45\n"); fr = ParseDataset2.parse(parsed, new Key[]{raw}); new GLM2("GLM test of poisson regression(2).",model,fr,false,Family.poisson, Family.poisson.defaultLink,0,0).fork().get(); m = DKV.get(model).get(); assertEquals(0.3396,m.beta()[1],1e-4); assertEquals(0.2565,m.beta()[0],1e-4); ======= model.delete(); k = FVecTest.makeByteVec(raw, "x,y\n1,0\n2,1\n3,2\n4,3\n5,1\n6,4\n7,9\n8,18\n9,23\n10,31\n11,20\n12,25\n13,37\n14,45\n"); fr = ParseDataset2.parse(parsed, new Key[]{k}); new GLM2("GLM test of poisson regression(2).",modelKey,fr,false,Family.poisson, Family.poisson.defaultLink,0,0).fork().get(); model = DKV.get(modelKey).get(); assertEquals(0.3396,model.beta()[1],1e-4); assertEquals(0.2565,model.beta()[0],1e-4); >>>>>>> model.delete(); UKV.remove(raw); FVecTest.makeByteVec(raw, "x,y\n1,0\n2,1\n3,2\n4,3\n5,1\n6,4\n7,9\n8,18\n9,23\n10,31\n11,20\n12,25\n13,37\n14,45\n"); fr = ParseDataset2.parse(parsed, new Key[]{raw}); new GLM2("GLM test of poisson regression(2).",modelKey,fr,false,Family.poisson, Family.poisson.defaultLink,0,0).fork().get(); model = DKV.get(modelKey).get(); assertEquals(0.3396,model.beta()[1],1e-4); 
assertEquals(0.2565,model.beta()[0],1e-4);
<<<<<<< public final void compute2() { ======= @Override public final void compute() { >>>>>>> @Override public final void compute2() {
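The resolution above keeps the @Override annotation while settling on compute2() as the hook name. As a hedged aside on why the annotation matters here: the JDK's CountedCompleter exposes compute(), while frameworks that fork it (H2O's completer uses compute2(), as this record shows) rename the hook, and @Override is what makes the compiler complain if a method ends up overriding nothing after such a rename. A minimal JDK-only example of the standard hook:

    import java.util.concurrent.CountedCompleter;
    import java.util.concurrent.ForkJoinPool;

    public class OverrideHookSketch {
        static final class Hello extends CountedCompleter<Void> {
            // If the superclass hook were renamed, this @Override would turn into a compile error
            // instead of silently leaving the method uncalled.
            @Override public void compute() {
                System.out.println("running in " + Thread.currentThread().getName());
                tryComplete();
            }
        }

        public static void main(String[] args) {
            ForkJoinPool.commonPool().invoke(new Hello());
        }
    }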
<<<<<<< ======= lambda = i == lambda.length?new double [] {lambda_max}:Arrays.copyOfRange(lambda, i, lambda.length); } _model = new GLMModel(GLM2.this, self(),dest(),_dinfo, _glm,beta_epsilon,alpha[0],lambda_max,lambda,ymut.ymu()); _model.warnings = warns; _model.clone().delete_and_lock(self()); if(lambda[0] == lambda_max && alpha[0] > 0){ // fill-in trivial solution for lambda max _beta = MemoryManager.malloc8d(_dinfo.fullN()+1); _beta[_beta.length-1] = _glm.link(ymut.ymu()); _model.setLambdaSubmodel(0,_beta,_beta,0); t._val.finalize_AIC_AUC(); _model.setAndTestValidation(0,t._val); _lambdaIdx = 1; >>>>>>>
<<<<<<< C1Chunk( byte[] bs ) { super(0xFF); _mem=bs; _start = -1; _len = _mem.length; } @Override public long at8_impl( int i ) { return 0xFF&_mem[i+OFF]; } @Override public double atd_impl( int i ) { int res = 0xFF&_mem[i+OFF]; return (res == NA())?Double.NaN:res; ======= static protected final long _NA = 0xFF; C1Chunk(byte[] bs) { _mem=bs; _start = -1; _len = _mem.length; } @Override public long get ( int i ) { long res = 0xFF&_mem[i+OFF]; assert (res == _NA) || !_vec.isNA(res); return (res == _NA)?_vec._iNA:res; } @Override public double getd( int i ) { long res = 0xFF&_mem[i+OFF]; assert (res == _NA) || !_vec.isNA((double)res); return (res == _NA)?_vec._fNA:res; >>>>>>> static protected final long _NA = 0xFF; C1Chunk(byte[] bs) { _mem=bs; _start = -1; _len = _mem.length; } @Override protected final long at8_impl( int i ) { long res = 0xFF&_mem[i+OFF]; return (res == _NA)?_vec._iNA:res; } @Override protected final double atd_impl( int i ) { long res = 0xFF&_mem[i+OFF]; return (res == _NA)?_vec._fNA:res;
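The C1Chunk record above stores one value per byte and reserves the bit pattern 0xFF as the NA sentinel; the resolution maps that sentinel to the vector's configured NA value (_vec._iNA / _vec._fNA). A minimal sketch of the decoding scheme outside of H2O — class name, constant, and the use of Double.NaN as the missing marker are all simplifications for illustration:

    public class SentinelSketch {
        static final int NA_SENTINEL = 0xFF;      // reserved pattern, so valid stored values are 0..254

        // Mirrors the masking in the chunk accessors: widen the byte unsigned, then map the sentinel.
        static double decode(byte raw) {
            int v = 0xFF & raw;                   // 0xFF & ... undoes Java's sign extension
            return (v == NA_SENTINEL) ? Double.NaN : v;
        }

        public static void main(String[] args) {
            byte[] mem = { 0, 42, (byte) 0xFF };  // last entry encodes a missing value
            for (byte b : mem) {
                System.out.println(decode(b));    // 0.0, 42.0, NaN
            }
        }
    }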
<<<<<<< @Test public void doFillToolInstallationNameItems_returnsInstallations() { PowerMockito.mockStatic(SaveableListener.class); final DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); ListBoxModel installationNameItems = descriptor.doFillToolInstallationNameItems(); assertNotNull(installationNameItems); assertEquals(0, installationNameItems.size()); descriptor.setInstallations( new CoverityToolInstallation(CoverityToolInstallation.DEFAULT_NAME, "default/path/to/coverity"), new CoverityToolInstallation("Additional install", "alternate/path/to/coverity")); installationNameItems = descriptor.doFillToolInstallationNameItems(); assertNotNull(installationNameItems); assertEquals(2, installationNameItems.size()); assertEquals(CoverityToolInstallation.DEFAULT_NAME, installationNameItems.get(0).name); assertEquals(CoverityToolInstallation.DEFAULT_NAME, installationNameItems.get(0).value); assertEquals("Additional install", installationNameItems.get(1).name); assertEquals("Additional install", installationNameItems.get(1).value); } ======= @Test public void doCheckPostCovBuildCmdTest(){ final CoverityPublisher.DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); FormValidation result = descriptor.doCheckPostCovBuildCmd(StringUtils.EMPTY); assertEquals(result.kind, FormValidation.Kind.ERROR); assertEquals(result.getMessage(), "Post cov-build command cannot be empty!"); result = descriptor.doCheckPostCovBuildCmd("Test Post cov-build command"); assertEquals(result.kind, FormValidation.Kind.OK); } @Test public void doCheckPostAnalyzeCmdTest(){ final CoverityPublisher.DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); FormValidation result = descriptor.doCheckPostCovAnalyzeCmd(StringUtils.EMPTY); assertEquals(result.kind, FormValidation.Kind.ERROR); assertEquals(result.getMessage(), "Post cov-analyze command cannot be empty!"); result = descriptor.doCheckPostCovAnalyzeCmd("Test Post cov-analyze command"); assertEquals(result.kind, FormValidation.Kind.OK); } @Test public void doCheckUserTest(){ final CoverityPublisher.DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); FormValidation result = descriptor.doCheckUser(StringUtils.EMPTY); assertEquals(result.kind, FormValidation.Kind.OK); result = descriptor.doCheckUser("TestUser"); assertEquals(result.kind, FormValidation.Kind.WARNING); assertEquals(result.getMessage(), "User is deprecated in Coverity plugin version 1.10 and later. Please use Credentials above for more secure username."); } @Test public void doCheckPasswordTest(){ final CoverityPublisher.DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); FormValidation result = descriptor.doCheckPassword(StringUtils.EMPTY); assertEquals(result.kind, FormValidation.Kind.OK); result = descriptor.doCheckPassword("TestPassword"); assertEquals(result.kind, FormValidation.Kind.WARNING); assertEquals(result.getMessage(), "Password is deprecated in Coverity plugin version 1.10 and later. 
Please use Credentials above for more secure password."); } >>>>>>> @Test public void doFillToolInstallationNameItems_returnsInstallations() { PowerMockito.mockStatic(SaveableListener.class); final DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); ListBoxModel installationNameItems = descriptor.doFillToolInstallationNameItems(); assertNotNull(installationNameItems); assertEquals(0, installationNameItems.size()); descriptor.setInstallations( new CoverityToolInstallation(CoverityToolInstallation.DEFAULT_NAME, "default/path/to/coverity"), new CoverityToolInstallation("Additional install", "alternate/path/to/coverity")); installationNameItems = descriptor.doFillToolInstallationNameItems(); assertNotNull(installationNameItems); assertEquals(2, installationNameItems.size()); assertEquals(CoverityToolInstallation.DEFAULT_NAME, installationNameItems.get(0).name); assertEquals(CoverityToolInstallation.DEFAULT_NAME, installationNameItems.get(0).value); assertEquals("Additional install", installationNameItems.get(1).name); assertEquals("Additional install", installationNameItems.get(1).value); } @Test public void doCheckPostCovBuildCmdTest(){ final CoverityPublisher.DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); FormValidation result = descriptor.doCheckPostCovBuildCmd(StringUtils.EMPTY); assertEquals(result.kind, FormValidation.Kind.ERROR); assertEquals(result.getMessage(), "Post cov-build command cannot be empty!"); result = descriptor.doCheckPostCovBuildCmd("Test Post cov-build command"); assertEquals(result.kind, FormValidation.Kind.OK); } @Test public void doCheckPostAnalyzeCmdTest(){ final CoverityPublisher.DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); FormValidation result = descriptor.doCheckPostCovAnalyzeCmd(StringUtils.EMPTY); assertEquals(result.kind, FormValidation.Kind.ERROR); assertEquals(result.getMessage(), "Post cov-analyze command cannot be empty!"); result = descriptor.doCheckPostCovAnalyzeCmd("Test Post cov-analyze command"); assertEquals(result.kind, FormValidation.Kind.OK); } @Test public void doCheckUserTest(){ final CoverityPublisher.DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); FormValidation result = descriptor.doCheckUser(StringUtils.EMPTY); assertEquals(result.kind, FormValidation.Kind.OK); result = descriptor.doCheckUser("TestUser"); assertEquals(result.kind, FormValidation.Kind.WARNING); assertEquals(result.getMessage(), "User is deprecated in Coverity plugin version 1.10 and later. Please use Credentials above for more secure username."); } @Test public void doCheckPasswordTest(){ final CoverityPublisher.DescriptorImpl descriptor = new CoverityPublisher.DescriptorImpl(); FormValidation result = descriptor.doCheckPassword(StringUtils.EMPTY); assertEquals(result.kind, FormValidation.Kind.OK); result = descriptor.doCheckPassword("TestPassword"); assertEquals(result.kind, FormValidation.Kind.WARNING); assertEquals(result.getMessage(), "Password is deprecated in Coverity plugin version 1.10 and later. Please use Credentials above for more secure password."); }
<<<<<<< Request.addToNavbar(registerRequest(new KMeans2()), "KMeans2" ,"Beta (FluidVecs!)"); ======= Request.addToNavbar(registerRequest(new Inspect2()), "Inspect", "Beta (FluidVecs!)"); >>>>>>> Request.addToNavbar(registerRequest(new Inspect2()), "Inspect", "Beta (FluidVecs!)"); Request.addToNavbar(registerRequest(new KMeans2()), "KMeans2" ,"Beta (FluidVecs!)");
<<<<<<< public GBMModel(DTree.TreeModel prior, double err, ConfusionMatrix cm, ConfusionMatrix[] auccms) { super(prior, err, cm, null, null, auccms); ======= public GBMModel(DTree.TreeModel prior, double err, ConfusionMatrix cm, water.api.AUC validAUC) { super(prior, err, cm, validAUC); >>>>>>> public GBMModel(DTree.TreeModel prior, double err, ConfusionMatrix cm, water.api.AUC validAUC) { super(prior, err, cm, null, null, validAUC); <<<<<<< @Override protected GBMModel makeModel( GBMModel model, double err, ConfusionMatrix cm, float[] varimp, float[] varimpSD, ConfusionMatrix[] auccms) { return new GBMModel(model, err, cm, auccms); ======= @Override protected GBMModel makeModel( GBMModel model, double err, ConfusionMatrix cm, ConfusionMatrix[] auccms) { return new GBMModel(model, err, cm, auccms!=null ? new water.api.AUC(auccms, ModelUtils.DEFAULT_THRESHOLDS) : null); >>>>>>> @Override protected GBMModel makeModel( GBMModel model, double err, ConfusionMatrix cm, float[] varimp, float[] varimpSD, ConfusionMatrix[] auccms) { return new GBMModel(model, err, cm, auccms!=null ? new water.api.AUC(auccms, ModelUtils.DEFAULT_THRESHOLDS) : null);
<<<<<<< @Override protected Status exec() { ======= @Override protected void logStart() { Log.info("Starting GBM model build..."); super.logStart(); Log.info(" learn_rate: " + learn_rate); } @Override protected JobState exec() { >>>>>>> @Override protected JobState exec() {