conflict_resolution
(string lengths: 27 to 16k characters per entry)
Each entry pairs one or more Git merge conflicts ("ours" between the <<<<<<< and ======= markers, "theirs" between ======= and >>>>>>>) with the resolved code that follows the closing >>>>>>> marker.
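As a rough illustration of how each entry is structured, here is a minimal Java sketch (a hypothetical helper, not part of any project quoted below) that splits a single conflict block into its "ours", "theirs", and resolution parts. It assumes the standard three-marker layout used throughout this column; rows containing several conflict blocks would apply it once per block.

// Hypothetical helper, a minimal sketch only: split one conflict block of a
// conflict_resolution entry into its "ours", "theirs", and resolution parts.
import java.util.ArrayList;
import java.util.List;

public final class ConflictEntry {
    public final List<String> ours = new ArrayList<>();       // lines between <<<<<<< and =======
    public final List<String> theirs = new ArrayList<>();     // lines between ======= and >>>>>>>
    public final List<String> resolution = new ArrayList<>(); // lines after >>>>>>>

    public static ConflictEntry parse(String entry) {
        ConflictEntry e = new ConflictEntry();
        int state = 0; // 0 = preamble, 1 = ours, 2 = theirs, 3 = resolution
        for (String line : entry.split("\n", -1)) {
            if (line.startsWith("<<<<<<<")) {
                state = 1;
            } else if (state == 1 && line.startsWith("=======")) {
                state = 2;
            } else if (state == 2 && line.startsWith(">>>>>>>")) {
                state = 3;
            } else if (state == 1) {
                e.ours.add(line);
            } else if (state == 2) {
                e.theirs.add(line);
            } else if (state == 3) {
                e.resolution.add(line);
            }
        }
        return e;
    }
}

Applied to the first entry below, for example, ours would hold the two Gerrit imports, theirs would be empty, and resolution would hold the single surviving ProjectOperations import.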
<<<<<<<
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.testsuite.project.ProjectOperations;
=======
>>>>>>>
import com.google.gerrit.acceptance.testsuite.project.ProjectOperations;
<<<<<<<
// clone with user to avoid inherited tag permissions of admin user
testRepo = cloneProject(project, user);
initialHead = projectOperations.project(project).getHead("master");
=======
initialHead = getRemoteHead();
>>>>>>>
initialHead = projectOperations.project(project).getHead("master");
<<<<<<<
=======
import som.vmobjects.SAbstractObject;
import som.vmobjects.SObject;
>>>>>>>
import som.vmobjects.SObject;
<<<<<<<
public abstract Object getArgument(final int i);
=======
public FrameOnStackMarker getFrameOnStackMarker() {
return onStackMarker;
}
public Object[] getUpvalues() {
return upvalues;
}
public abstract SAbstractObject getArgument(final int i);
>>>>>>>
public abstract Object getArgument(final int i);
public FrameOnStackMarker getFrameOnStackMarker() {
return onStackMarker;
}
public Object[] getUpvalues() {
return upvalues;
}
<<<<<<<
public UnaryArguments(final Object self) {
super(self);
=======
public UnaryArguments(final SAbstractObject self, final int numUpvalues, final SObject nilObject) {
super(self, numUpvalues, nilObject);
>>>>>>>
public UnaryArguments(final Object self, final int numUpvalues, final SObject nilObject) {
super(self, numUpvalues, nilObject);
<<<<<<<
private final Object arg;
public BinaryArguments(final Object self, final Object arg) {
super(self);
=======
private final SAbstractObject arg;
public BinaryArguments(final SAbstractObject self, final SAbstractObject arg, final int numUpvalues, final SObject nilObject) {
super(self, numUpvalues, nilObject);
>>>>>>>
private final Object arg;
public BinaryArguments(final Object self, final Object arg,
final int numUpvalues, final SObject nilObject) {
super(self, numUpvalues, nilObject);
<<<<<<<
public TernaryArguments(final Object self, final Object arg1,
final Object arg2) {
super(self);
=======
public TernaryArguments(final SAbstractObject self,
final SAbstractObject arg1,
final SAbstractObject arg2,
final int numUpvalues, final SObject nilObject) {
super(self, numUpvalues, nilObject);
>>>>>>>
public TernaryArguments(final Object self, final Object arg1,
final Object arg2, final int numUpvalues, final SObject nilObject) {
super(self, numUpvalues, nilObject);
<<<<<<<
public KeywordArguments(final Object self, final Object[] arguments) {
super(self);
=======
public KeywordArguments(final SAbstractObject self,
final SAbstractObject[] arguments,
final int numUpvalues, final SObject nilObject) {
super(self, numUpvalues, nilObject);
>>>>>>>
public KeywordArguments(final Object self, final Object[] arguments,
final int numUpvalues, final SObject nilObject) {
super(self, numUpvalues, nilObject);
<<<<<<<
protected static Object messageSendExecution(final FrameOnStackMarker marker,
final VirtualFrame frame,
=======
protected static SAbstractObject messageSendExecution(final VirtualFrame frame,
>>>>>>>
protected static Object messageSendExecution(final VirtualFrame frame,
<<<<<<<
Object result;
=======
FrameOnStackMarker marker = Arguments.get(frame).getFrameOnStackMarker();
SAbstractObject result;
>>>>>>>
FrameOnStackMarker marker = Arguments.get(frame).getFrameOnStackMarker();
Object result;
<<<<<<<
UnaryArguments args = new UnaryArguments(receiver);
=======
UnaryArguments args = new UnaryArguments((SAbstractObject) receiver, numUpvalues, universe.nilObject);
>>>>>>>
UnaryArguments args = new UnaryArguments(receiver, numUpvalues, universe.nilObject);
<<<<<<<
BinaryArguments args = new BinaryArguments(receiver, argument);
=======
BinaryArguments args = new BinaryArguments((SAbstractObject) receiver,
(SAbstractObject) argument, numUpvalues, universe.nilObject);
>>>>>>>
BinaryArguments args = new BinaryArguments(receiver, argument, numUpvalues, universe.nilObject);
<<<<<<<
public Object doSBlock(final VirtualFrame frame, final SBlock receiver) {
return receiver.getMethod().invoke(frame.pack(), receiver);
=======
public SAbstractObject doSBlock(final VirtualFrame frame, final SBlock receiver) {
return receiver.getMethod().invoke(frame.pack(), receiver, universe);
>>>>>>>
public Object doSBlock(final VirtualFrame frame, final SBlock receiver) {
return receiver.getMethod().invoke(frame.pack(), receiver, universe);
<<<<<<<
public Object doSBlock(final VirtualFrame frame,
final SBlock receiver, final Object arg) {
return receiver.getMethod().invoke(frame.pack(), receiver, arg);
=======
public SAbstractObject doSBlock(final VirtualFrame frame,
final SBlock receiver, final SAbstractObject arg) {
return receiver.getMethod().invoke(frame.pack(), receiver, arg, universe);
>>>>>>>
public Object doSBlock(final VirtualFrame frame, final SBlock receiver,
final Object arg) {
return receiver.getMethod().invoke(frame.pack(), receiver, arg, universe);
<<<<<<<
public Object doSBlock(final VirtualFrame frame,
final SBlock receiver, final Object arg1, final Object arg2) {
return receiver.getMethod().invoke(frame.pack(), receiver, arg1, arg2);
=======
public SAbstractObject doSBlock(final VirtualFrame frame,
final SBlock receiver, final SAbstractObject arg1, final SAbstractObject arg2) {
return receiver.getMethod().invoke(frame.pack(), receiver, arg1, arg2, universe);
>>>>>>>
public Object doSBlock(final VirtualFrame frame,
final SBlock receiver, final Object arg1, final Object arg2) {
return receiver.getMethod().invoke(frame.pack(), receiver, arg1, arg2, universe);
<<<<<<<
return receiver.getMethod().invoke(frame.pack(), receiver, arguments);
=======
return receiver.getMethod().invoke(frame.pack(), receiver, (SAbstractObject[]) arguments, universe);
>>>>>>>
return receiver.getMethod().invoke(frame.pack(), receiver, arguments, universe);
<<<<<<<
if (receiver == universe.trueObject) {
=======
Arguments context;
if (receiver) {
>>>>>>>
Arguments context;
if (receiver == universe.trueObject) {
<<<<<<<
assert receiver == universe.falseObject;
SMethod branchMethod = falseBlock.getMethod();
SBlock b = universe.newBlock(branchMethod, frame.materialize(), 1);
return branchMethod.invoke(frame.pack(), b);
=======
SMethod branchMethod = falseBlock.getMethod();
Arguments context = falseBlock.getContext(); // TODO: test whether the current implementation is correct, or whether it should be the following: Method.getUpvalues(frame);
SBlock b = universe.newBlock(branchMethod, context);
return branchMethod.invoke(frame.pack(), b, universe);
>>>>>>>
assert receiver == universe.falseObject;
SMethod branchMethod = falseBlock.getMethod();
Arguments context = falseBlock.getContext(); // TODO: test whether the current implementation is correct, or whether it should be the following: Method.getUpvalues(frame);
SBlock b = universe.newBlock(branchMethod, context);
return branchMethod.invoke(frame.pack(), b, universe);
<<<<<<<
import som.interpreter.Types;
import som.interpreter.nodes.VariableNode.SuperReadNode;
=======
import som.interpreter.nodes.SelfReadNode.SuperReadNode;
>>>>>>>
import som.interpreter.Types;
import som.interpreter.nodes.SelfReadNode.SuperReadNode;
<<<<<<<
return SAbstractObject.sendDoesNotUnderstand(rcvr, selector, args,
universe, frame.pack());
=======
// TODO: mark as exceptional case
return rcvr.sendDoesNotUnderstand(selector, args, universe, frame.pack());
>>>>>>>
// TODO: mark as exceptional case
return SAbstractObject.sendDoesNotUnderstand(rcvr, selector, args,
universe, frame.pack());
<<<<<<<
public Editor(IEditorContainer container, FileListField field) {
=======
public Editor(IEditorContainer container, Composite parent, IField<List<File>> field) {
>>>>>>>
public Editor(IEditorContainer container, Composite parent, FileListField field) {
<<<<<<<
this.field = field;
int hspan = container.getColumnCount();
Composite parent = container.getComposite();
=======
int hspan = getColumnCount(parent);
>>>>>>>
this.field = field;
int hspan = getColumnCount(parent);
<<<<<<<
private final MultiIconButton modeB;
=======
private final MultiIconButtonEIO modeB;
private final Rectangle progressTooltipRect;
private boolean wasSpawnMode;
private String header;
>>>>>>>
private final MultiIconButton modeB;
private final Rectangle progressTooltipRect;
private boolean wasSpawnMode;
private String header;
<<<<<<<
String txt = EnderIO.lang.localize("gui.machine.poweredspawner.spawn");
if(!spawner.isSpawnMode()) {
txt = EnderIO.lang.localize("gui.machine.poweredspawner.capture");
=======
if(spawnMode != wasSpawnMode) {
updateSpawnMode(spawnMode);
>>>>>>>
if(spawnMode != wasSpawnMode) {
updateSpawnMode(spawnMode);
<<<<<<<
=======
import crazypants.util.BlockCoord;
>>>>>>>
<<<<<<<
String s = EnderIO.lang.localizeExact(t.inv.getInventory().getInventoryName()) + " " + t.inv.location.chatString() + " Distance [" + t.distance + "] ";
=======
String s = t.inv.getLocalizedInventoryName() + " " + t.inv.location.chatString() + " Distance [" + t.distance + "] ";
>>>>>>>
String s = t.inv.getLocalizedInventoryName() + " " + t.inv.location.chatString() + " Distance [" + t.distance + "] ";
<<<<<<<
result.add(EnderIO.lang.localizeExact(inv.getInventory().getInventoryName()) + " " + inv.location.chatString());
=======
result.add(inv.getLocalizedInventoryName() + " " + inv.location.chatString());
>>>>>>>
result.add(inv.getLocalizedInventoryName() + " " + inv.location.chatString());
<<<<<<<
ClientRegistry.bindTileEntitySpecialRenderer(TileFarmStation.class, new FarmingStationSpecialRenderer());
// BlockWirelessCharger.renderId = RenderingRegistry.getNextAvailableRenderId();
// RenderingRegistry.registerBlockHandler(new WirelessChargerRenderer());
=======
// BlockWirelessCharger.renderId = RenderingRegistry.getNextAvailableRenderId();
// RenderingRegistry.registerBlockHandler(new WirelessChargerRenderer());
>>>>>>>
ClientRegistry.bindTileEntitySpecialRenderer(TileFarmStation.class, new FarmingStationSpecialRenderer());
<<<<<<<
private String inputHeading;
private String outputHeading;
private boolean insertEnabled = false;
private boolean extractEnabled = false;
private final CheckBox extractEnabledB;
private final CheckBox insertEnabledB;
private final boolean hasInputOutputMode;
=======
protected MultiIconButton leftArrow;
protected MultiIconButton rightArrow;
protected @Nonnull String modeLabel = EnderIO.lang.localize("gui.conduit.io_mode");
protected ConnectionMode oldConectionMode;
>>>>>>>
private String inputHeading;
private String outputHeading;
private boolean insertEnabled = false;
private boolean extractEnabled = false;
private final CheckBox extractEnabledB;
private final CheckBox insertEnabledB;
private final boolean hasInputOutputMode;
<<<<<<<
protected BaseSettingsPanel(@Nonnull IconEIO icon, String typeName, @Nonnull GuiExternalConnection gui, @Nonnull IConduit con, @Nonnull String texture) {
this(icon, typeName, gui, con, texture, true);
}
protected BaseSettingsPanel(@Nonnull IconEIO icon, String typeName, @Nonnull GuiExternalConnection gui, @Nonnull IConduit con, @Nonnull String texture,
boolean hasInputOutputMode) {
=======
protected BaseSettingsPanel(@Nonnull IconEIO icon, String typeName, @Nonnull GuiExternalConnection gui, @Nonnull IClientConduit con,
@Nonnull String texture) {
>>>>>>>
protected BaseSettingsPanel(@Nonnull IconEIO icon, String typeName, @Nonnull GuiExternalConnection gui, @Nonnull IConduit con, @Nonnull String texture) {
this(icon, typeName, gui, con, texture, true);
}
protected BaseSettingsPanel(@Nonnull IconEIO icon, String typeName, @Nonnull GuiExternalConnection gui, @Nonnull IConduit con, @Nonnull String texture,
boolean hasInputOutputMode) {
protected BaseSettingsPanel(@Nonnull IconEIO icon, String typeName, @Nonnull GuiExternalConnection gui, @Nonnull IClientConduit con,
@Nonnull String texture) {
<<<<<<<
=======
import crazypants.enderio.tool.SmartTank;
import crazypants.util.BlockCoord;
import crazypants.util.FluidUtil;
import crazypants.util.ITankAccess;
import crazypants.util.ItemUtil;
import crazypants.util.FluidUtil.FluidAndStackResult;
>>>>>>>
import crazypants.enderio.tool.SmartTank;
<<<<<<<
@Mod(name = "EnderIO", modid = "EnderIO", version = "0.2.3", dependencies = "required-after:Forge@[9.11.0.883,)")
=======
@Mod(name = "EnderIO", modid = "EnderIO", version = "0.2.4", dependencies = "required-after:Forge@[9.10.0.800,)")
>>>>>>>
@Mod(name = "EnderIO", modid = "EnderIO", version = "0.2.4", dependencies = "required-after:Forge@[9.11.0.883,)") |
<<<<<<<
String s = EnderIO.lang.localizeExact(t.inv.getInventory().getInventoryName()) + " " + t.inv.location + " Distance [" + t.distance + "] ";
=======
String s = Lang.localize(t.inv.getInventory().getInventoryName(), false) + " " + t.inv.location.chatString() + " Distance [" + t.distance + "] ";
>>>>>>>
String s = EnderIO.lang.localizeExact(t.inv.getInventory().getInventoryName()) + " " + t.inv.location.chatString() + " Distance [" + t.distance + "] ";
<<<<<<<
result.add(EnderIO.lang.localizeExact(inv.getInventory().getInventoryName()) + " " + inv.location);
=======
result.add(Lang.localize(inv.getInventory().getInventoryName(), false) + " " + inv.location.chatString());
>>>>>>>
result.add(EnderIO.lang.localizeExact(inv.getInventory().getInventoryName()) + " " + inv.location.chatString());
<<<<<<<
@Mod(name = "EnderIO", modid = "EnderIO", version = "0.5.4", dependencies = "required-after:Forge@[9.11.0.883,)")
=======
@Mod(name = "EnderIO", modid = "EnderIO", version = "0.5.5", dependencies = "required-after:Forge@[9.10.0.800,)")
>>>>>>>
@Mod(name = "EnderIO", modid = "EnderIO", version = "0.5.5", dependencies = "required-after:Forge@[9.11.0.883,)") |
<<<<<<<
END_STEEL_MACHINE_CHASSIS("endSteelMachineChassi"),
DYE_ENHANCED_MACHINE("enhanced_machine_dye", "dyeEnhancedMachine"),
=======
DISH("dish", "itemWirelessDish"),
>>>>>>>
DISH("dish", "itemWirelessDish"),
END_STEEL_MACHINE_CHASSIS("endSteelMachineChassi"),
DYE_ENHANCED_MACHINE("enhanced_machine_dye", "dyeEnhancedMachine"),
<<<<<<<
import static com.enderio.core.common.util.OreDictionaryHelper.*;
import static crazypants.enderio.EnderIO.itemBasicCapacitor;
=======
import static crazypants.enderio.EnderIO.itemBasicCapacitor;
import static crazypants.enderio.material.Alloy.*;
import static crazypants.enderio.material.Material.*;
import static crazypants.util.OreDictionaryHelper.*;
import static crazypants.util.RecipeUtil.*;
>>>>>>>
import static com.enderio.core.common.util.OreDictionaryHelper.*;
import static crazypants.enderio.EnderIO.itemBasicCapacitor;
import static crazypants.enderio.material.Alloy.*;
import static crazypants.enderio.material.Material.*;
import static crazypants.util.RecipeUtil.*;
<<<<<<<
OreDictionary.registerOre("gearStone", new ItemStack(EnderIO.itemMachinePart, 1, MachinePart.BASIC_GEAR.ordinal()));
OreDictionary.registerOre("itemSilicon", new ItemStack(EnderIO.itemMaterial, 1, Material.SILICON.ordinal()));
=======
>>>>>>>
<<<<<<<
ItemStack cbc = binderComposite.copy();
cbc.stackSize = 8;
if (Config.useAlternateBinderRecipe) {
GameRegistry.addShapedRecipe(cbc, "gcg", "sgs", "gcg", 'g', Blocks.gravel, 's', Blocks.sand, 'c', Items.clay_ball);
=======
ItemStack cbc = BINDER_COMPOSITE.getStack(8);
if (Config.useAlternateBinderRecipe) {
addShaped(cbc, "gcg", "sgs", "gcg", 'g', Blocks.gravel, 's', Blocks.sand, 'c', Items.clay_ball);
>>>>>>>
ItemStack cbc = BINDER_COMPOSITE.getStack(8);
if (Config.useAlternateBinderRecipe) {
addShaped(cbc, "gcg", "sgs", "gcg", 'g', Blocks.gravel, 's', Blocks.sand, 'c', Items.clay_ball);
<<<<<<<
if (Config.useSteelInChassi == true && steelIngots != null && !steelIngots.isEmpty()) {
GameRegistry.addRecipe(new ShapedOreRecipe(machineChassi, "fif", "ici", "fif", 'f', Blocks.iron_bars, 'i', "ingotSteel", 'c', capacitor));
} else {
GameRegistry.addShapedRecipe(machineChassi, "fif", "ici", "fif", 'f', Blocks.iron_bars, 'i', Items.iron_ingot, 'c', capacitor);
}
=======
ItemStack chassis = new ItemStack(EnderIO.itemMachinePart, 1, MachinePart.MACHINE_CHASSI.ordinal());
String mat = Config.useSteelInChassi == true && steelIngots != null && !steelIngots.isEmpty() ? "ingotSteel" : "ingotIron";
addShaped(chassis, "fif", "ici", "fif", 'f', Blocks.iron_bars, 'i', mat, 'c', capacitor);
>>>>>>>
ItemStack chassis = new ItemStack(EnderIO.itemMachinePart, 1, MachinePart.MACHINE_CHASSI.ordinal());
String mat = Config.useSteelInChassi == true && steelIngots != null && !steelIngots.isEmpty() ? "ingotSteel" : "ingotIron";
addShaped(chassis, "fif", "ici", "fif", 'f', Blocks.iron_bars, 'i', mat, 'c', capacitor);
<<<<<<<
if (Config.useHardRecipes) {
GameRegistry.addShapedRecipe(enderCapacitor, "eee", "cgc", "eee", 'e', phasedGold, 'c', activatedCapacitor, 'g', Blocks.glowstone);
=======
if (Config.useHardRecipes) {
addShaped(enderCapacitor, "eee", "cgc", "eee", 'e', phasedGold, 'c', activatedCapacitor, 'g', Blocks.glowstone);
>>>>>>>
if (Config.useHardRecipes) {
addShaped(enderCapacitor, "eee", "cgc", "eee", 'e', phasedGold, 'c', activatedCapacitor, 'g', Blocks.glowstone);
<<<<<<<
ItemStack corners = darkSteel;
if (Config.reinforcedObsidianUseDarkSteelBlocks) {
corners = new ItemStack(EnderIO.blockIngotStorage, 1, Alloy.DARK_STEEL.ordinal());
=======
String corners = darkSteel;
if (Config.reinforcedObsidianUseDarkSteelBlocks) {
corners = Alloy.DARK_STEEL.oreBlock;
>>>>>>>
String corners = darkSteel;
if (Config.reinforcedObsidianUseDarkSteelBlocks) {
corners = Alloy.DARK_STEEL.oreBlock;
<<<<<<<
}
public static void addOreDictionaryRecipes() {
if (OreDictionaryHelper.hasCopper()) {
=======
if (hasCopper()) {
>>>>>>>
if (hasCopper()) {
<<<<<<<
Item gold;
if (Config.useHardRecipes) {
gold = Items.gold_ingot;
=======
String gold;
if (Config.useHardRecipes) {
gold = "ingotGold";
>>>>>>>
String gold;
if (Config.useHardRecipes) {
gold = "ingotGold"; |
<<<<<<<
import com.enderio.core.client.gui.button.IconButton;
import com.enderio.core.client.gui.button.MultiIconButton;
import com.enderio.core.client.gui.widget.GhostSlot;
import com.enderio.core.client.gui.widget.GuiToolTip;
import com.enderio.core.client.gui.widget.TextFieldEnder;
import com.enderio.core.client.gui.widget.VScrollbar;
import com.enderio.core.client.handlers.SpecialTooltipHandler;
import com.enderio.core.client.render.EnderWidget;
import com.enderio.core.client.render.RenderUtil;
import com.enderio.core.common.util.ItemUtil;
import cpw.mods.fml.common.Optional;
=======
import codechicken.nei.LayoutManager;
import cpw.mods.fml.common.Loader;
>>>>>>>
import codechicken.nei.LayoutManager;
import com.enderio.core.client.gui.button.IconButton;
import com.enderio.core.client.gui.button.MultiIconButton;
import com.enderio.core.client.gui.button.ToggleButton;
import com.enderio.core.client.gui.widget.GhostSlot;
import com.enderio.core.client.gui.widget.GuiToolTip;
import com.enderio.core.client.gui.widget.TextFieldEnder;
import com.enderio.core.client.gui.widget.VScrollbar;
import com.enderio.core.client.handlers.SpecialTooltipHandler;
import com.enderio.core.client.render.EnderWidget;
import com.enderio.core.client.render.RenderUtil;
import com.enderio.core.common.Lang;
import com.enderio.core.common.util.ItemUtil;
import cpw.mods.fml.common.Loader;
<<<<<<<
=======
import crazypants.enderio.gui.MultiIconButtonEIO;
import crazypants.enderio.gui.TextFieldEIO;
import crazypants.enderio.gui.ToggleButtonEIO;
import crazypants.enderio.gui.TooltipAddera;
import crazypants.enderio.gui.VScrollbarEIO;
>>>>>>>
<<<<<<<
=======
import crazypants.gui.GhostSlot;
import crazypants.gui.GuiToolTip;
import crazypants.render.RenderUtil;
import crazypants.util.ItemUtil;
import crazypants.util.Lang;
>>>>>>>
<<<<<<<
private final TextFieldEnder tfFilter;
private final IconButton btnSort;
=======
private final TextFieldEIO tfFilter;
private final IconButtonEIO btnSort;
private final ToggleButtonEIO btnSync;
>>>>>>>
private final TextFieldEnder tfFilter;
private final IconButton btnSort;
private final ToggleButton btnSync;
<<<<<<<
tfFilter = new TextFieldEnder(fr, 108, 11, 106, 10);
=======
btnSync = new ToggleButtonEIO(this, ID_SYNC, 24 + 233, 46, IconEIO.CROSS, IconEIO.TICK);
btnSync.setToolTip(Lang.localize("gui.inventorypanel.tooltip.sync"));
btnSync.setSelectedToolTip(Lang.localize("gui.enabled"));
btnSync.setUnselectedToolTip(Lang.localize("gui.disabled"));
btnSync.setSelected(getTileEntity().getGuiSync());
if (!Loader.isModLoaded("NotEnoughItems")) {
btnSync.enabled = false;
}
tfFilter = new TextFieldEIO(fr, 24+108, 11, 106, 10);
>>>>>>>
btnSync = new ToggleButton(this, ID_SYNC, 24 + 233, 46, IconEIO.CROSS, IconEIO.TICK);
btnSync.setToolTip(EnderIO.lang.localize("gui.inventorypanel.tooltip.sync"));
btnSync.setSelectedToolTip(EnderIO.lang.localize("gui.enabled"));
btnSync.setUnselectedToolTip(EnderIO.lang.localize("gui.disabled"));
btnSync.setSelected(getTileEntity().getGuiSync());
if (!Loader.isModLoaded("NotEnoughItems")) {
btnSync.enabled = false;
}
tfFilter = new TextFieldEnder(fr, 24+108, 11, 106, 10);
<<<<<<<
tfFilter.setText(te.getGuiFilterString());
btnSort = new IconButton(this, ID_SORT, 233, 27, getSortOrderIcon()) {
=======
setText(tfFilter, te.getGuiFilterString());
btnSort = new IconButtonEIO(this, ID_SORT, 24+233, 27, getSortOrderIcon()) {
>>>>>>>
setText(tfFilter, te.getGuiFilterString());
btnSort = new IconButton(this, ID_SORT, 24+233, 27, getSortOrderIcon()) {
<<<<<<<
scrollbar = new VScrollbar(this, 215, 27, 90);
btnClear = new MultiIconButton(this, ID_CLEAR, 65, 60, EnderWidget.X_BUT, EnderWidget.X_BUT_PRESSED, EnderWidget.X_BUT_HOVER);
=======
scrollbar = new VScrollbarEIO(this, 24+215, 27, 90);
btnClear = new MultiIconButtonEIO(this, ID_CLEAR, 24+65, 60, IconEIO.X_BUT, IconEIO.X_BUT_PRESSED, IconEIO.X_BUT_HOVER);
>>>>>>>
scrollbar = new VScrollbar(this, 24+215, 27, 90);
btnClear = new MultiIconButton(this, ID_CLEAR, 24+65, 60, EnderWidget.X_BUT, EnderWidget.X_BUT_PRESSED, EnderWidget.X_BUT_HOVER);
<<<<<<<
import com.google.common.collect.Lists;
=======
import cpw.mods.fml.common.Optional;
>>>>>>>
<<<<<<<
import cpw.mods.fml.common.Optional;
=======
>>>>>>>
import com.google.common.collect.Lists;
import cpw.mods.fml.common.Optional;
<<<<<<<
protected List<TextFieldEIO> textFields = Lists.newArrayList();
=======
protected List<GhostSlot> ghostSlots = new ArrayList<GhostSlot>();
protected GhostSlot hoverGhostSlot;
>>>>>>>
protected List<TextFieldEIO> textFields = Lists.newArrayList();
protected List<GhostSlot> ghostSlots = new ArrayList<GhostSlot>();
protected GhostSlot hoverGhostSlot;
<<<<<<<
public final void drawGuiContainerForegroundLayer(int mouseX, int mouseY) {
=======
protected void drawGuiContainerBackgroundLayer(float f, int mouseX, int mouseY) {
drawGhostSlots(mouseX, mouseY);
}
@Override
protected final void drawGuiContainerForegroundLayer(int mouseX, int mouseY) {
>>>>>>>
protected final void drawGuiContainerForegroundLayer(int mouseX, int mouseY) {
<<<<<<<
@Mod(name = "EnderIO", modid = "EnderIO", version = "0.5.6", dependencies = "required-after:Forge@[9.11.0.883,)")
=======
@Mod(name = "EnderIO", modid = "EnderIO", version = "0.6.0", dependencies = "required-after:Forge@[9.10.0.800,)")
>>>>>>>
@Mod(name = "EnderIO", modid = "EnderIO", version = "0.6.0", dependencies = "required-after:Forge@[9.11.0.883,)") |
<<<<<<<
=======
install(new UrlModule(urlConfig, uiOptions, authConfig));
install(new UiRpcModule());
install(new GerritRequestModule());
install(new GitOverHttpServlet.Module());
bind(GitWebConfig.class).toInstance(gitWebConfig);
if (gitWebConfig.getGitwebCGI() != null) {
install(new GitWebModule());
}
bind(ContactStore.class).toProvider(ContactStoreProvider.class).in(
SINGLETON);
bind(GerritConfigProvider.class);
bind(GerritConfig.class).toProvider(GerritConfigProvider.class);
DynamicSet.setOf(binder(), WebUiPlugin.class);
install(new AsyncReceiveCommits.Module());
bind(SocketAddress.class).annotatedWith(RemotePeer.class).toProvider(
HttpRemotePeerProvider.class).in(RequestScoped.class);
listener().toInstance(registerInParentInjectors());
>>>>>>>
<<<<<<<
sb.append("Power Network");
player.sendChatToPlayer(ChatMessageComponent.createFromText(sb.toString()));
=======
sb.append(NET_HEADING);
player.sendChatToPlayer(ChatMessageComponent.func_111066_d(sb.toString()));
>>>>>>>
sb.append(NET_HEADING);
player.sendChatToPlayer(ChatMessageComponent.createFromText(sb.toString()));
<<<<<<<
sb.append(" Average input over 5 seconds: ");
sb.append(FLOAT_NF.format(tracker.getAverageMjTickRecieved()));
player.sendChatToPlayer(ChatMessageComponent.createFromText(sb.toString()));
=======
sb.append(AVE_IN);
sb.append(PowerDisplayUtil.formatPowerFloat(tracker.getAverageMjTickRecieved()));
player.sendChatToPlayer(ChatMessageComponent.func_111066_d(sb.toString()));
>>>>>>>
sb.append(AVE_IN);
sb.append(PowerDisplayUtil.formatPowerFloat(tracker.getAverageMjTickRecieved()));
player.sendChatToPlayer(ChatMessageComponent.createFromText(sb.toString()));
<<<<<<<
sb.append("Power Conduit");
player.sendChatToPlayer(ChatMessageComponent.createFromText(sb.toString()));
=======
sb.append(ENERGY_CONDUIT);
player.sendChatToPlayer(ChatMessageComponent.func_111066_d(sb.toString()));
>>>>>>>
sb.append(ENERGY_CONDUIT);
player.sendChatToPlayer(ChatMessageComponent.createFromText(sb.toString()));
<<<<<<<
sb.append(" Average input over 5 seconds: ");
sb.append(FLOAT_NF.format(tracker.getAverageMjTickRecieved()));
player.sendChatToPlayer(ChatMessageComponent.createFromText(sb.toString()));
=======
sb.append(AVE_IN);
sb.append(PowerDisplayUtil.formatPowerFloat(tracker.getAverageMjTickRecieved()));
player.sendChatToPlayer(ChatMessageComponent.func_111066_d(sb.toString()));
>>>>>>>
sb.append(AVE_IN);
sb.append(PowerDisplayUtil.formatPowerFloat(tracker.getAverageMjTickRecieved()));
player.sendChatToPlayer(ChatMessageComponent.createFromText(sb.toString()));
<<<<<<<
import crazypants.enderio.machines.config.config.TankConfig;
=======
import crazypants.enderio.machines.config.config.PersonalConfig;
>>>>>>>
import crazypants.enderio.machines.config.config.PersonalConfig;
import crazypants.enderio.machines.config.config.TankConfig;
<<<<<<<
// add mending recipes
if (TankConfig.allowMending.get()) {
Map<Enchantment, Integer> enchMap = Collections.singletonMap(Enchantments.MENDING, 1);
final int maxMendable = TileTank.xpToDurability(XpUtil.liquidToExperience(16000));
for (ItemStack stack : validItems) {
if (stack.isItemStackDamageable()) {
ItemStack enchantedStack;
if (EnchantmentHelper.getEnchantmentLevel(Enchantments.MENDING, stack) > 0) {
enchantedStack = stack.copy();
} else if (Enchantments.MENDING.canApply(stack)) {
enchantedStack = stack.copy();
EnchantmentHelper.setEnchantments(enchMap, enchantedStack);
} else {
continue;
}
=======
if (PersonalConfig.enableTankMendingJEIRecipes.get()) {
// add mending recipes
Map<Enchantment, Integer> enchMap = Collections.singletonMap(Enchantments.MENDING, 1);
final int maxMendable = TileTank.xpToDurability(XpUtil.liquidToExperience(16000));
for (ItemStack stack : validItems) {
if (stack.isItemStackDamageable()) {
ItemStack enchantedStack;
if (EnchantmentHelper.getEnchantmentLevel(Enchantments.MENDING, stack) > 0) {
enchantedStack = stack.copy();
} else if (Enchantments.MENDING.canApply(stack)) {
enchantedStack = stack.copy();
EnchantmentHelper.setEnchantments(enchMap, enchantedStack);
} else {
continue;
}
>>>>>>>
if (TankConfig.allowMending.get()) {
if (PersonalConfig.enableTankMendingJEIRecipes.get()) {
// add mending recipes
Map<Enchantment, Integer> enchMap = Collections.singletonMap(Enchantments.MENDING, 1);
final int maxMendable = TileTank.xpToDurability(XpUtil.liquidToExperience(16000));
for (ItemStack stack : validItems) {
if (stack.isItemStackDamageable()) {
ItemStack enchantedStack;
if (EnchantmentHelper.getEnchantmentLevel(Enchantments.MENDING, stack) > 0) {
enchantedStack = stack.copy();
} else if (Enchantments.MENDING.canApply(stack)) {
enchantedStack = stack.copy();
EnchantmentHelper.setEnchantments(enchMap, enchantedStack);
} else {
continue;
}
<<<<<<<
if (damagedStack.getItemDamage() != enchantedStack.getItemDamage()) {
result.add(new TankRecipeWrapper(new FluidStack(Fluids.XP_JUICE.getFluid(), XpUtil.experienceToLiquid(TileTank.durabilityToXp(damageMendable))),
null, damagedStack, enchantedStack));
}
=======
if (damagedStack.getItemDamage() != enchantedStack.getItemDamage()) {
result.add(
new TankRecipeWrapper(new FluidStack(Fluids.XP_JUICE.getFluid(), XpUtil.experienceToLiquid(TileTank.durabilityToXp(damageMendable))), null,
damagedStack, enchantedStack));
}
>>>>>>>
if (damagedStack.getItemDamage() != enchantedStack.getItemDamage()) {
result.add(new TankRecipeWrapper(new FluidStack(Fluids.XP_JUICE.getFluid(), XpUtil.experienceToLiquid(TileTank.durabilityToXp(damageMendable))),
null, damagedStack, enchantedStack));
}
}
<<<<<<<
import crazypants.enderio.machine.generator.zombie.NutrientTank;
=======
import crazypants.enderio.gui.MultiIconButtonEIO;
import crazypants.enderio.gui.TextFieldEIO;
import crazypants.enderio.gui.TooltipAddera;
import crazypants.enderio.gui.VScrollbarEIO;
>>>>>>>
<<<<<<<
=======
import crazypants.enderio.tool.SmartTank;
import crazypants.gui.GhostSlot;
import crazypants.gui.GuiToolTip;
import crazypants.render.RenderUtil;
import crazypants.util.ItemUtil;
import crazypants.util.Lang;
import net.minecraft.client.gui.inventory.GuiContainer;
>>>>>>>
import crazypants.enderio.tool.SmartTank;
<<<<<<<
private final VScrollbar scrollbar;
private final MultiIconButton btnClear;
=======
private final GuiToolTip ttSetReceipe;
private final VScrollbarEIO scrollbar;
private final MultiIconButtonEIO btnClear;
>>>>>>>
private final VScrollbar scrollbar;
private final MultiIconButton btnClear;
private final GuiToolTip ttSetReceipe;
<<<<<<<
btnSort = new IconButton(this, ID_SORT, 233, 27, getSortOrderIcon()) {
=======
tfFilter.setText(te.getGuiFilterString());
btnSort = new IconButtonEIO(this, ID_SORT, 233, 27, getSortOrderIcon()) {
>>>>>>>
tfFilter.setText(te.getGuiFilterString());
btnSort = new IconButton(this, ID_SORT, 233, 27, getSortOrderIcon()) {
<<<<<<<
SpecialTooltipHandler.addTooltipFromResources(list, "enderio.gui.inventorypanel.tooltip.clear.line");
=======
TooltipAddera.addTooltipFromResources(list, "enderio.gui.inventorypanel.tooltip.setrecipe.line");
ttSetReceipe = new GuiToolTip(btnRefill, list) {
@Override
public boolean shouldDraw() {
return super.shouldDraw() && getContainer().hasCraftingRecipe();
}
};
addToolTip(ttSetReceipe);
list.clear();
TooltipAddera.addTooltipFromResources(list, "enderio.gui.inventorypanel.tooltip.clear.line");
>>>>>>>
SpecialTooltipHandler.addTooltipFromResources(list, "enderio.gui.inventorypanel.tooltip.setrecipe.line");
ttSetReceipe = new GuiToolTip(btnRefill, list) {
@Override
public boolean shouldDraw() {
return super.shouldDraw() && getContainer().hasCraftingRecipe();
}
};
addToolTip(ttSetReceipe);
list.clear();
SpecialTooltipHandler.addTooltipFromResources(list, "enderio.gui.inventorypanel.tooltip.clear.line"); |
<<<<<<<
=======
import io.netty.channel.ChannelHandlerContext;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.world.World;
>>>>>>>
<<<<<<<
import cpw.mods.fml.common.network.simpleimpl.IMessage;
import cpw.mods.fml.common.network.simpleimpl.IMessageHandler;
import cpw.mods.fml.common.network.simpleimpl.MessageContext;
=======
import crazypants.enderio.conduit.IConduitBundle;
import crazypants.enderio.conduit.item.FilterRegister;
>>>>>>>
import cpw.mods.fml.common.network.simpleimpl.IMessage;
import cpw.mods.fml.common.network.simpleimpl.IMessageHandler;
import cpw.mods.fml.common.network.simpleimpl.MessageContext;
import crazypants.enderio.conduit.item.FilterRegister;
<<<<<<<
private void applyFilter(ForgeDirection dir, IItemConduit conduit, ItemFilter filter, boolean isInput) {
if(filter == null) {
=======
private void applyFilter(IItemConduit conduit, IItemFilter filter, boolean isInput) {
// if(filter == null) {
>>>>>>>
private void applyFilter(ForgeDirection dir, IItemConduit conduit, IItemFilter filter, boolean isInput) {
// if(filter == null) {
<<<<<<<
=======
blockVacuumChest = BlockVacuumChest.create();
>>>>>>>
blockVacuumChest = BlockVacuumChest.create();
<<<<<<<
=======
>>>>>>>
<<<<<<<
=======
import cpw.mods.fml.common.registry.TickRegistry;
import cpw.mods.fml.relauncher.Side;
>>>>>>>
<<<<<<<
case "SortedMap":
case "NavigableMap":
return ParameterizedTypeName.get(
ClassName.get(TreeMap.class),
genericArgument(field, 0), genericArgument(field, 1));
=======
case "Collection":
return ParameterizedTypeName.get(
ClassName.get(ArrayList.class),
genericArgument(field, 0));
>>>>>>>
case "SortedMap":
case "NavigableMap":
return ParameterizedTypeName.get(
ClassName.get(TreeMap.class),
genericArgument(field, 0), genericArgument(field, 1));
case "Collection":
return ParameterizedTypeName.get(
ClassName.get(ArrayList.class),
genericArgument(field, 0));
<<<<<<<
returnType.startsWith("java.util.Set<") ||
returnType.startsWith("java.util.SortedSet<") ||
returnType.startsWith("java.util.NavigableSet<");
=======
returnType.startsWith("java.util.Collection<") ||
returnType.startsWith("java.util.Set<");
>>>>>>>
returnType.startsWith("java.util.Collection<") ||
returnType.startsWith("java.util.Set<") ||
returnType.startsWith("java.util.SortedSet<") ||
returnType.startsWith("java.util.NavigableSet<");
<<<<<<<
case "SortedMap":
return "unmodifiableSortedMap";
case "NavigableMap":
return "unmodifiableNavigableMap";
=======
case "Collection":
return "unmodifiableList";
>>>>>>>
case "SortedMap":
return "unmodifiableSortedMap";
case "NavigableMap":
return "unmodifiableNavigableMap";
case "Collection":
return "unmodifiableList";
<<<<<<<
case "SortedMap":
return "emptySortedMap";
case "NavigableMap":
return "emptyNavigableMap";
=======
case "Collection":
return "emptyList";
>>>>>>>
case "SortedMap":
return "emptySortedMap";
case "NavigableMap":
return "emptyNavigableMap";
case "Collection":
return "emptyList";
<<<<<<<
} else if (returnType.startsWith("java.util.SortedMap<")) {
return "SortedMap";
} else if (returnType.startsWith("java.util.NavigableMap<")) {
return "NavigableMap";
=======
} else if (returnType.startsWith("java.util.Collection<")) {
return "Collection";
>>>>>>>
} else if (returnType.startsWith("java.util.SortedMap<")) {
return "SortedMap";
} else if (returnType.startsWith("java.util.NavigableMap<")) {
return "NavigableMap";
} else if (returnType.startsWith("java.util.Collection<")) {
return "Collection"; |
<<<<<<<
=======
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
>>>>>>>
import org.elasticsearch.common.ParsingException;
<<<<<<<
public BoolQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException {
=======
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
>>>>>>>
public BoolQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, ParsingException {
<<<<<<<
import java.util.stream.Collectors;
=======
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
>>>>>>>
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
<<<<<<<
/**
*
*
*/
public static final class DirectCandidateGenerator extends CandidateGenerator {
private final String field;
private String preFilter;
private String postFilter;
private String suggestMode;
private Float accuracy;
private Integer size;
private String sort;
private String stringDistance;
private Integer maxEdits;
private Integer maxInspections;
private Float maxTermFreq;
private Integer prefixLength;
private Integer minWordLength;
private Float minDocFreq;
/**
* @param field Sets from what field to fetch the candidate suggestions from.
*/
public DirectCandidateGenerator(String field) {
super("direct_generator");
this.field = field;
}
/**
* The global suggest mode controls what suggested terms are included or
* controls for what suggest text tokens, terms should be suggested for.
* Three possible values can be specified:
* <ol>
* <li><code>missing</code> - Only suggest terms in the suggest text
* that aren't in the index. This is the default.
* <li><code>popular</code> - Only suggest terms that occur in more docs
* than the original suggest text term.
* <li><code>always</code> - Suggest any matching suggest terms based on
* tokens in the suggest text.
* </ol>
*/
public DirectCandidateGenerator suggestMode(String suggestMode) {
this.suggestMode = suggestMode;
return this;
}
/**
* Sets how similar the suggested terms at least need to be compared to
* the original suggest text tokens. A value between 0 and 1 can be
* specified. This value will be compared to the string distance result
* of each candidate spelling correction.
* <p>
* Default is <tt>0.5</tt>
*/
public DirectCandidateGenerator accuracy(float accuracy) {
this.accuracy = accuracy;
return this;
}
/**
* Sets the maximum suggestions to be returned per suggest text term.
*/
public DirectCandidateGenerator size(int size) {
if (size <= 0) {
throw new IllegalArgumentException("Size must be positive");
}
this.size = size;
return this;
}
/**
* Sets how to sort the suggest terms per suggest text token. Two
* possible values:
* <ol>
* <li><code>score</code> - Sort should first be based on score, then
* document frequency and then the term itself.
* <li><code>frequency</code> - Sort should first be based on document
* frequency, then score and then the term itself.
* </ol>
* <p>
* What the score is depends on the suggester being used.
*/
public DirectCandidateGenerator sort(String sort) {
this.sort = sort;
return this;
}
/**
* Sets what string distance implementation to use for comparing how
* similar suggested terms are. Four possible values can be specified:
* <ol>
* <li><code>internal</code> - This is the default and is based on
* <code>damerau_levenshtein</code>, but highly optimized for comparing
* string distance for terms inside the index.
* <li><code>damerau_levenshtein</code> - String distance algorithm
* based on Damerau-Levenshtein algorithm.
* <li><code>levenstein</code> - String distance algorithm based on
* Levenstein edit distance algorithm.
* <li><code>jarowinkler</code> - String distance algorithm based on
* Jaro-Winkler algorithm.
* <li><code>ngram</code> - String distance algorithm based on character
* n-grams.
* </ol>
*/
public DirectCandidateGenerator stringDistance(String stringDistance) {
this.stringDistance = stringDistance;
return this;
}
/**
* Sets the maximum edit distance candidate suggestions can have in
* order to be considered as a suggestion. Can only be a value between 1
* and 2. Any other value results in a bad request error being thrown.
* Defaults to <tt>2</tt>.
*/
public DirectCandidateGenerator maxEdits(Integer maxEdits) {
this.maxEdits = maxEdits;
return this;
}
/**
* A factor that is used to multiply with the size in order to inspect
* more candidate suggestions. Can improve accuracy at the cost of
* performance. Defaults to <tt>5</tt>.
*/
public DirectCandidateGenerator maxInspections(Integer maxInspections) {
this.maxInspections = maxInspections;
return this;
}
/**
* Sets a maximum threshold in number of documents a suggest text token
* can exist in order to be corrected. Can be a relative percentage
* number (e.g. 0.4) or an absolute number to represent document
* frequencies. If a value higher than 1 is specified, the value
* cannot be fractional. Defaults to <tt>0.01</tt>.
* <p>
* This can be used to exclude high frequency terms from being
* suggested. High frequency terms are usually spelled correctly, and
* excluding them also improves the suggest performance.
*/
public DirectCandidateGenerator maxTermFreq(float maxTermFreq) {
this.maxTermFreq = maxTermFreq;
return this;
}
/**
* Sets the number of minimal prefix characters that must match in order
* to be a candidate suggestion. Defaults to 1. Increasing this number
* improves suggest performance. Usually misspellings don't occur in the
* beginning of terms.
*/
public DirectCandidateGenerator prefixLength(int prefixLength) {
this.prefixLength = prefixLength;
return this;
}
/**
* The minimum length a suggest text term must have in order to be
* corrected. Defaults to <tt>4</tt>.
*/
public DirectCandidateGenerator minWordLength(int minWordLength) {
this.minWordLength = minWordLength;
return this;
}
/**
* Sets a minimal threshold in number of documents a suggested term
* should appear in. This can be specified as an absolute number or as a
* relative percentage of number of documents. This can improve quality
* by only suggesting high frequency terms. Defaults to 0f and is not
* enabled. If a value higher than 1 is specified then the number cannot
* be fractional.
*/
public DirectCandidateGenerator minDocFreq(float minDocFreq) {
this.minDocFreq = minDocFreq;
return this;
}
/**
* Sets a filter (analyzer) that is applied to each of the tokens passed to this candidate generator.
* This filter is applied to the original token before candidates are generated.
*/
public DirectCandidateGenerator preFilter(String preFilter) {
this.preFilter = preFilter;
return this;
}
/**
* Sets a filter (analyzer) that is applied to each of the generated tokens
* before they are passed to the actual phrase scorer.
*/
public DirectCandidateGenerator postFilter(String postFilter) {
this.postFilter = postFilter;
return this;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (field != null) {
builder.field("field", field);
}
if (suggestMode != null) {
builder.field("suggest_mode", suggestMode);
}
if (accuracy != null) {
builder.field("accuracy", accuracy);
}
if (size != null) {
builder.field("size", size);
}
if (sort != null) {
builder.field("sort", sort);
}
if (stringDistance != null) {
builder.field("string_distance", stringDistance);
}
if (maxEdits != null) {
builder.field("max_edits", maxEdits);
}
if (maxInspections != null) {
builder.field("max_inspections", maxInspections);
}
if (maxTermFreq != null) {
builder.field("max_term_freq", maxTermFreq);
}
if (prefixLength != null) {
builder.field("prefix_length", prefixLength);
}
if (minWordLength != null) {
builder.field("min_word_length", minWordLength);
}
if (minDocFreq != null) {
builder.field("min_doc_freq", minDocFreq);
}
if (preFilter != null) {
builder.field("pre_filter", preFilter);
}
if (postFilter != null) {
builder.field("post_filter", postFilter);
}
builder.endObject();
return builder;
}
}
@Override
public String getWriteableName() {
return SUGGESTION_NAME;
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeOptionalFloat(maxErrors);
out.writeOptionalFloat(realWordErrorLikelihood);
out.writeOptionalFloat(confidence);
out.writeOptionalVInt(gramSize);
// NORELEASE model.writeTo();
out.writeOptionalBoolean(forceUnigrams);
out.writeOptionalVInt(tokenLimit);
out.writeOptionalString(preTag);
out.writeOptionalString(postTag);
out.writeOptionalString(separator);
if (collateQuery != null) {
out.writeBoolean(true);
collateQuery.writeTo(out);
} else {
out.writeBoolean(false);
}
out.writeMap(collateParams);
out.writeOptionalBoolean(collatePrune);
// NORELEASE write Map<String, List<CandidateGenerator>> generators = new HashMap<>();
}
@Override
public PhraseSuggestionBuilder doReadFrom(StreamInput in, String name) throws IOException {
PhraseSuggestionBuilder builder = new PhraseSuggestionBuilder(name);
builder.maxErrors = in.readOptionalFloat();
builder.realWordErrorLikelihood = in.readOptionalFloat();
builder.confidence = in.readOptionalFloat();
builder.gramSize = in.readOptionalVInt();
// NORELEASE read model
builder.forceUnigrams = in.readOptionalBoolean();
builder.tokenLimit = in.readOptionalVInt();
builder.preTag = in.readOptionalString();
builder.postTag = in.readOptionalString();
builder.separator = in.readOptionalString();
if (in.readBoolean()) {
builder.collateQuery = Template.readTemplate(in);
}
builder.collateParams = in.readMap();
builder.collatePrune = in.readOptionalBoolean();
// NORELEASE read Map<String, List<CandidateGenerator>> generators;
return builder;
}
@Override
protected boolean doEquals(PhraseSuggestionBuilder other) {
return Objects.equals(maxErrors, other.maxErrors) &&
Objects.equals(separator, other.separator) &&
Objects.equals(realWordErrorLikelihood, other.realWordErrorLikelihood) &&
Objects.equals(confidence, other.confidence) &&
// NORELEASE Objects.equals(generator, other.generator) &&
Objects.equals(gramSize, other.gramSize) &&
// NORELEASE Objects.equals(model, other.model) &&
Objects.equals(forceUnigrams, other.forceUnigrams) &&
Objects.equals(tokenLimit, other.tokenLimit) &&
Objects.equals(preTag, other.preTag) &&
Objects.equals(postTag, other.postTag) &&
Objects.equals(collateQuery, other.collateQuery) &&
Objects.equals(collateParams, other.collateParams) &&
Objects.equals(collatePrune, other.collatePrune);
}
@Override
protected int doHashCode() {
return Objects.hash(maxErrors, separator, realWordErrorLikelihood, confidence,
/** NORELEASE generators, */
gramSize,
/** NORELEASE model, */
forceUnigrams, tokenLimit, preTag, postTag,
collateQuery, collateParams, collatePrune);
}
=======
>>>>>>>
<<<<<<<
=======
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
>>>>>>>
import org.elasticsearch.common.ParsingException;
<<<<<<<
public MissingQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException {
=======
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
>>>>>>>
public MissingQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
<<<<<<<
@Override
public MissingQueryBuilder getBuilderPrototype() {
return MissingQueryBuilder.PROTOTYPE;
=======
public static Query newFilter(QueryParseContext parseContext, String fieldPattern, boolean existence, boolean nullValue, String queryName) {
if (!existence && !nullValue) {
throw new ParsingException(parseContext, "missing must have either existence, or null_value, or both set to true");
}
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
if (fieldNamesFieldType == null) {
// can only happen when no types exist, so no docs exist either
return Queries.newMatchNoDocsQuery();
}
ObjectMapper objectMapper = parseContext.getObjectMapper(fieldPattern);
if (objectMapper != null) {
// automatic make the object mapper pattern
fieldPattern = fieldPattern + ".*";
}
Collection<String> fields = parseContext.simpleMatchToIndexNames(fieldPattern);
if (fields.isEmpty()) {
if (existence) {
// if we ask for existence of fields, and we found none, then we should match on all
return Queries.newMatchAllQuery();
}
return null;
}
Query existenceFilter = null;
Query nullFilter = null;
if (existence) {
BooleanQuery.Builder boolFilter = new BooleanQuery.Builder();
for (String field : fields) {
MappedFieldType fieldType = parseContext.fieldMapper(field);
Query filter = null;
if (fieldNamesFieldType.isEnabled()) {
final String f;
if (fieldType != null) {
f = fieldType.names().indexName();
} else {
f = field;
}
filter = fieldNamesFieldType.termQuery(f, parseContext);
}
// if _field_names are not indexed, we need to go the slow way
if (filter == null && fieldType != null) {
filter = fieldType.rangeQuery(null, null, true, true);
}
if (filter == null) {
filter = new TermRangeQuery(field, null, null, true, true);
}
boolFilter.add(filter, BooleanClause.Occur.SHOULD);
}
existenceFilter = boolFilter.build();
existenceFilter = Queries.not(existenceFilter);
}
if (nullValue) {
for (String field : fields) {
MappedFieldType fieldType = parseContext.fieldMapper(field);
if (fieldType != null) {
nullFilter = fieldType.nullValueQuery();
}
}
}
Query filter;
if (nullFilter != null) {
if (existenceFilter != null) {
filter = new BooleanQuery.Builder()
.add(existenceFilter, BooleanClause.Occur.SHOULD)
.add(nullFilter, BooleanClause.Occur.SHOULD)
.build();
} else {
filter = nullFilter;
}
} else {
filter = existenceFilter;
}
if (filter == null) {
return null;
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, existenceFilter);
}
return new ConstantScoreQuery(filter);
>>>>>>>
@Override
public MissingQueryBuilder getBuilderPrototype() {
return MissingQueryBuilder.PROTOTYPE;
<<<<<<<
Engine.Delete engineDelete = IndexShard.prepareDelete(uid.type(), uid.id(), delete.uid(), delete.seqNo(),
delete.version(), delete.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY);
engine.delete(engineDelete);
=======
final Engine.Delete engineDelete = new Engine.Delete(uid.type(), uid.id(), delete.uid(), delete.version(),
delete.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY, System.nanoTime(), false);
delete(engine, engineDelete);
>>>>>>>
final Engine.Delete engineDelete = IndexShard.prepareDelete(uid.type(), uid.id(), delete.uid(), delete.seqNo(),
delete.version(), delete.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY);
delete(engine, engineDelete);
<<<<<<<
final boolean created;
index.updateVersion(updatedVersion);
if (index.origin() == Operation.Origin.PRIMARY) {
index.updateSeqNo(seqNoService.generateSeqNo());
}
if (currentVersion == Versions.NOT_FOUND) {
// document does not exists, we can optimize for create
created = true;
index(index, indexWriter);
} else {
created = update(index, versionValue, indexWriter);
}
=======
final long updatedVersion = updateVersion(index, currentVersion, expectedVersion);
final boolean created = indexOrUpdate(index, currentVersion, versionValue);
>>>>>>>
maybeUpdateSequenceNumber(index);
final long updatedVersion = updateVersion(index, currentVersion, expectedVersion);
final boolean created = indexOrUpdate(index, currentVersion, versionValue);
<<<<<<<
long updatedVersion;
long expectedVersion = delete.version();
if (delete.versionType().isVersionConflictForWrites(currentVersion, expectedVersion, deleted)) {
if (delete.origin().isRecovery()) {
return;
} else {
throw new VersionConflictEngineException(shardId, delete.type(), delete.id(),
delete.versionType().explainConflictForWrites(currentVersion, expectedVersion, deleted));
}
}
updatedVersion = delete.versionType().updateVersion(currentVersion, expectedVersion);
if (delete.origin() == Operation.Origin.PRIMARY) {
delete.updateSeqNo(seqNoService.generateSeqNo());
}
final boolean found;
if (currentVersion == Versions.NOT_FOUND) {
// doc does not exist and no prior deletes
found = false;
} else if (versionValue != null && versionValue.delete()) {
// a "delete on delete", in this case, we still increment the version, log it, and return that version
found = false;
} else {
// we deleted a currently existing document
indexWriter.deleteDocuments(delete.uid());
found = true;
}
=======
final long expectedVersion = delete.version();
if (checkVersionConflict(delete, currentVersion, expectedVersion, deleted)) return;
final long updatedVersion = updateVersion(delete, currentVersion, expectedVersion);
final boolean found = deleteIfFound(delete, currentVersion, deleted, versionValue);
>>>>>>>
final long expectedVersion = delete.version();
if (checkVersionConflict(delete, currentVersion, expectedVersion, deleted)) return;
maybeUpdateSequenceNumber(delete);
final long updatedVersion = updateVersion(delete, currentVersion, expectedVersion);
final boolean found = deleteIfFound(delete, currentVersion, deleted, versionValue);
<<<<<<<
if (delete.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) {
final Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
delete.setTranslogLocation(translogLocation);
versionMap.putUnderLock(delete.uid().bytes(), new DeleteVersionValue(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), delete.getTranslogLocation()));
} else {
// we do not replay in to the translog, so there is no
// translog location; that is okay because real-time
// gets are not possible during recovery and we will
// flush when the recovery is complete
versionMap.putUnderLock(delete.uid().bytes(), new DeleteVersionValue(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), null));
}
} finally {
if (delete.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) {
seqNoService.markSeqNoAsCompleted(delete.seqNo());
}
=======
maybeAddToTranslog(delete, updatedVersion, Translog.Delete::new, DeleteVersionValue::new);
}
}
private boolean deleteIfFound(Delete delete, long currentVersion, boolean deleted, VersionValue versionValue) throws IOException {
final boolean found;
if (currentVersion == Versions.NOT_FOUND) {
// doc does not exist and no prior deletes
found = false;
} else if (versionValue != null && deleted) {
// a "delete on delete", in this case, we still increment the version, log it, and return that version
found = false;
} else {
// we deleted a currently existing document
indexWriter.deleteDocuments(delete.uid());
found = true;
>>>>>>>
maybeAddToTranslog(delete, updatedVersion, Translog.Delete::new, DeleteVersionValue::new);
} finally {
if (delete.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) {
seqNoService.markSeqNoAsCompleted(delete.seqNo());
}
}
}
private boolean deleteIfFound(Delete delete, long currentVersion, boolean deleted, VersionValue versionValue) throws IOException {
final boolean found;
if (currentVersion == Versions.NOT_FOUND) {
// doc does not exist and no prior deletes
found = false;
} else if (versionValue != null && deleted) {
// a "delete on delete", in this case, we still increment the version, log it, and return that version
found = false;
} else {
// we deleted a currently existing document
indexWriter.deleteDocuments(delete.uid());
found = true;
<<<<<<<
public static class ScriptedTimeoutPlugin extends Plugin {
=======
public static class ScriptedTimeoutPlugin extends Plugin implements ScriptPlugin {
@Override
public String name() {
return "test-scripted-search-timeout";
}
@Override
public String description() {
return "Test for scripted timeouts on searches";
}
>>>>>>>
public static class ScriptedTimeoutPlugin extends Plugin implements ScriptPlugin {
<<<<<<<
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
=======
>>>>>>>
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
<<<<<<<
import com.google.inject.Singleton;
=======
import com.google.inject.Provider;
>>>>>>>
import com.google.inject.Provider;
import com.google.inject.Singleton;
<<<<<<<
ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT);
DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT);
=======
>>>>>>>
<<<<<<<
ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
=======
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
.build();
final IndexMetaData indexMetaData = metaData.index("test");
ShardRouting test_0 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
>>>>>>>
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
.build();
final IndexMetaData indexMetaData = metaData.index("test");
ShardRouting test_0 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 0, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<<
ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
=======
ShardRouting test_1 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 1, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
>>>>>>>
ShardRouting test_1 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 1, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<<
ShardRouting test_2 = ShardRouting.newUnassigned("test", 2, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
=======
ShardRouting test_2 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 2, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
>>>>>>>
ShardRouting test_2 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 2, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<<
ShardRouting test_3 = ShardRouting.newUnassigned("test", 3, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
=======
ShardRouting test_3 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 3, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
>>>>>>>
ShardRouting test_3 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 3, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<<
ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
=======
final Index index = new Index("test", "_na_");
ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
>>>>>>>
final Index index = new Index("test", "_na_");
ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<<
ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
=======
ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
>>>>>>>
ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<<
ShardRouting test_2 = ShardRouting.newUnassigned("test", 2, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
=======
ShardRouting test_2 = ShardRouting.newUnassigned(index, 2, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
>>>>>>>
ShardRouting test_2 = ShardRouting.newUnassigned(index, 2, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<<
ShardRouting test_3 = ShardRouting.newUnassigned("test", 3, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
=======
ShardRouting test_3 = ShardRouting.newUnassigned(index, 3, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
>>>>>>>
ShardRouting test_3 = ShardRouting.newUnassigned(index, 3, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<<
ShardRouting other_0 = ShardRouting.newUnassigned("other", 0, null, 1, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
=======
ShardRouting other_0 = ShardRouting.newUnassigned(new Index("other", "_NA_"), 0, null, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
>>>>>>>
ShardRouting other_0 = ShardRouting.newUnassigned(new Index("other", "_NA_"), 0, null, 1, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); |
<<<<<<<
doc.seqNo().setLongValue(seqNo);
return new Engine.Index(docMapper.getDocumentMapper().uidMapper().term(doc.uid().stringValue()), doc, seqNo, version, versionType, origin, startTime);
=======
MappedFieldType uidFieldType = docMapper.getDocumentMapper().uidMapper().fieldType();
Query uidQuery = uidFieldType.termQuery(doc.uid(), null);
Term uid = MappedFieldType.extractTerm(uidQuery);
return new Engine.Index(uid, doc, version, versionType, origin, startTime);
>>>>>>>
MappedFieldType uidFieldType = docMapper.getDocumentMapper().uidMapper().fieldType();
Query uidQuery = uidFieldType.termQuery(doc.uid(), null);
Term uid = MappedFieldType.extractTerm(uidQuery);
doc.seqNo().setLongValue(seqNo);
return new Engine.Index(uid, doc, seqNo, version, versionType, origin, startTime);
<<<<<<<
final Term uid = documentMapper.uidMapper().term(Uid.createUid(type, id));
return prepareDelete(type, id, uid, SequenceNumbersService.UNASSIGNED_SEQ_NO, version, versionType, Engine.Operation.Origin.PRIMARY);
=======
final MappedFieldType uidFieldType = documentMapper.uidMapper().fieldType();
final Query uidQuery = uidFieldType.termQuery(Uid.createUid(type, id), null);
final Term uid = MappedFieldType.extractTerm(uidQuery);
return prepareDelete(type, id, uid, version, versionType, Engine.Operation.Origin.PRIMARY);
>>>>>>>
final MappedFieldType uidFieldType = documentMapper.uidMapper().fieldType();
final Query uidQuery = uidFieldType.termQuery(Uid.createUid(type, id), null);
final Term uid = MappedFieldType.extractTerm(uidQuery);
return prepareDelete(type, id, uid, SequenceNumbersService.UNASSIGNED_SEQ_NO, version, versionType, Engine.Operation.Origin.PRIMARY);
<<<<<<<
final Term uid = documentMapper.uidMapper().term(Uid.createUid(type, id));
return prepareDelete(type, id, uid, seqNo, version, versionType, Engine.Operation.Origin.REPLICA);
=======
final MappedFieldType uidFieldType = documentMapper.uidMapper().fieldType();
final Query uidQuery = uidFieldType.termQuery(Uid.createUid(type, id), null);
final Term uid = MappedFieldType.extractTerm(uidQuery);
return prepareDelete(type, id, uid, version, versionType, Engine.Operation.Origin.REPLICA);
>>>>>>>
final MappedFieldType uidFieldType = documentMapper.uidMapper().fieldType();
final Query uidQuery = uidFieldType.termQuery(Uid.createUid(type, id), null);
final Term uid = MappedFieldType.extractTerm(uidQuery);
return prepareDelete(type, id, uid, seqNo, version, versionType, Engine.Operation.Origin.REPLICA); |
<<<<<<<
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
=======
import org.elasticsearch.index.Index;
>>>>>>>
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.indices.query.IndicesQueriesRegistry; |
<<<<<<<
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.search.aggregations.reducers.Reducer;
=======
>>>>>>>
import org.elasticsearch.search.aggregations.reducers.Reducer; |
<<<<<<<
try {
// add the response
final WriteResult<DeleteResponse> writeResult = TransportDeleteAction.executeDeleteRequestOnPrimary(deleteRequest, indexShard);
DeleteResponse deleteResponse = writeResult.getResponse();
location = locationToSync(location, writeResult.getLocation());
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_DELETE, deleteResponse));
} catch (Exception e) {
// nocommit: since we now have RetryOnPrimaryException, retrying doesn't always mean the shard is closed.
// some operations were already performed and have a seqno assigned. we shouldn't just reindex them
// rethrow the failure if we are going to retry on primary and let the parent failure handling take care of it
if (retryPrimaryException(e)) {
// restore updated versions...
for (int j = 0; j < requestIndex; j++) {
applyVersion(request.items()[j], preVersions[j], preVersionTypes[j]);
}
throw (ElasticsearchException) e;
}
logFailure(e, "delete", request.shardId(), deleteRequest);
// if it's a conflict failure, and we already executed the request on a primary (and we execute it
// again, due to primary relocation and only processing up to N bulk items when the shard gets closed)
// then just use the response we got from the successful execution
if (item.getPrimaryResponse() != null && isConflictException(e)) {
setResponse(item, item.getPrimaryResponse());
} else {
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_DELETE,
new BulkItemResponse.Failure(request.index(), deleteRequest.type(), deleteRequest.id(), e)));
}
=======
private UpdateResultHolder(BulkItemRequest replicaRequest, Engine.Result operationResult,
DocWriteResponse response) {
this.replicaRequest = replicaRequest;
this.operationResult = operationResult;
this.response = response;
>>>>>>>
private UpdateResultHolder(BulkItemRequest replicaRequest, Engine.Result operationResult,
DocWriteResponse response) {
this.replicaRequest = replicaRequest;
this.operationResult = operationResult;
this.response = response;
<<<<<<<
item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), indexRequest);
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, updateResponse));
break;
case DELETED:
@SuppressWarnings("unchecked")
WriteResult<DeleteResponse> writeResult = updateResult.writeResult;
DeleteResponse response = writeResult.getResponse();
DeleteRequest deleteRequest = updateResult.request();
updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getVersion(), response.getResult());
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), response.getVersion(), updateResult.result.updatedSourceAsMap(), updateResult.result.updateSourceContentType(), null));
// Replace the update request with the translated delete request to execute on the replica.
item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), deleteRequest);
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, updateResponse));
=======
// set translated request as replica request
replicaRequest = new BulkItemRequest(request.items()[requestIndex].id(), updateIndexRequest);
>>>>>>>
// set translated request as replica request
replicaRequest = new BulkItemRequest(request.items()[requestIndex].id(), updateIndexRequest);
<<<<<<<
default:
throw new IllegalStateException("Illegal operation " + updateResult.result.getResponseResult());
}
// NOTE: Breaking out of the retry_on_conflict loop!
break;
} else if (updateResult.failure()) {
Throwable e = updateResult.error;
if (updateResult.retry) {
// updateAttemptsCount is 0-based and marks the current attempt; if it's equal to retryOnConflict we stop retrying
if (updateAttemptsCount >= updateRequest.retryOnConflict()) {
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE,
new BulkItemResponse.Failure(request.index(), updateRequest.type(), updateRequest.id(), e)));
}
} else {
// nocommit: since we now have RetryOnPrimaryException, retrying doesn't always mean the shard is closed.
// some operations were already performed and have a seqno assigned. we shouldn't just reindex them
// rethrow the failure if we are going to retry on primary and let the parent failure handling take care of it
if (retryPrimaryException(e)) {
// restore updated versions...
for (int j = 0; j < requestIndex; j++) {
applyVersion(request.items()[j], preVersions[j], preVersionTypes[j]);
}
throw (ElasticsearchException) e;
}
// if it's a conflict failure, and we already executed the request on a primary (and we execute it
// again, due to primary relocation and only processing up to N bulk items when the shard gets closed)
// then just use the response we got from the successful execution
if (item.getPrimaryResponse() != null && isConflictException(e)) {
setResponse(item, item.getPrimaryResponse());
} else if (updateResult.result == null) {
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, new BulkItemResponse.Failure(request.index(), updateRequest.type(), updateRequest.id(), e)));
} else {
switch (updateResult.result.getResponseResult()) {
case CREATED:
case UPDATED:
IndexRequest indexRequest = updateResult.request();
logFailure(e, "index", request.shardId(), indexRequest);
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE,
new BulkItemResponse.Failure(request.index(), indexRequest.type(), indexRequest.id(), e)));
break;
case DELETED:
DeleteRequest deleteRequest = updateResult.request();
logFailure(e, "delete", request.shardId(), deleteRequest);
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_DELETE,
new BulkItemResponse.Failure(request.index(), deleteRequest.type(), deleteRequest.id(), e)));
break;
default:
throw new IllegalStateException("Illegal operation " + updateResult.result.getResponseResult());
}
}
// NOTE: Breaking out of the retry_on_conflict loop!
break;
=======
>>>>>>> |
<<<<<<<
public static Cast getLegalCast(final String location, final Type actual, final Type expected, final boolean explicit) {
final Cast cast = new Cast(actual, expected, explicit);
=======
public static Cast getLegalCast(final Definition definition, final String location, final Type actual,
final Type expected, final boolean explicit, final boolean internal) {
>>>>>>>
public static Cast getLegalCast(String location, Type actual, Type expected, boolean explicit, boolean internal) {
<<<<<<<
public static Type promoteNumeric(final Type from, final boolean decimal, final boolean primitive) {
=======
public static Type promoteNumeric(final Definition definition, final Type from, final boolean decimal) {
>>>>>>>
public static Type promoteNumeric(Type from, boolean decimal) {
<<<<<<<
return promoteNumeric(from0, from1, true, true);
=======
return promoteNumeric(definition, from0, from1, true);
>>>>>>>
return promoteNumeric(from0, from1, true);
<<<<<<<
return promoteNumeric(from0, from1, false, true);
=======
return promoteNumeric(definition, from0, from1, false);
>>>>>>>
return promoteNumeric(from0, from1, false);
<<<<<<<
final boolean primitive = sort0.primitive && sort1.primitive;
if (sort0.bool && sort1.bool) {
return primitive ? Definition.BOOLEAN_TYPE : Definition.BOOLEAN_OBJ_TYPE;
}
if (sort0.numeric && sort1.numeric) {
return promoteNumeric(from0, from1, true, primitive);
}
return Definition.OBJECT_TYPE;
}
public static Type promoteReference(final Type from0, final Type from1) {
final Sort sort0 = from0.sort;
final Sort sort1 = from1.sort;
if (sort0 == Sort.DEF || sort1 == Sort.DEF) {
return Definition.DEF_TYPE;
}
=======
>>>>>>>
<<<<<<<
return promoteNumeric(from0, from1, true, true);
=======
return promoteNumeric(definition, from0, from1, true);
>>>>>>>
return promoteNumeric(from0, from1, true); |
<<<<<<<
=======
import com.google.common.collect.ImmutableList;
>>>>>>> |
<<<<<<<
import java.util.Collection;
=======
import java.util.ArrayList;
>>>>>>>
import java.util.Collection;
import java.util.ArrayList; |
<<<<<<<
import com.google.gerrit.server.mime.MimeUtil2Module;
import com.google.gerrit.server.patch.IntraLineWorkerPool;
=======
import com.google.gerrit.server.patch.DiffExecutorModule;
>>>>>>>
import com.google.gerrit.server.mime.MimeUtil2Module;
import com.google.gerrit.server.patch.DiffExecutorModule;
<<<<<<<
modules.add(new IntraLineWorkerPool.Module());
modules.add(new MimeUtil2Module());
=======
modules.add(new MergeabilityChecksExecutorModule());
modules.add(new DiffExecutorModule());
>>>>>>>
modules.add(new DiffExecutorModule());
modules.add(new MimeUtil2Module()); |
<<<<<<<
public AssertingLocalTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) {
super(settings, threadPool, version, namedWriteableRegistry);
final long seed = settings.getAsLong(ElasticsearchIntegrationTest.SETTING_INDEX_SEED, 0l);
=======
public AssertingLocalTransport(Settings settings, ThreadPool threadPool, Version version) {
super(settings, threadPool, version);
final long seed = settings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l);
>>>>>>>
public AssertingLocalTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) {
super(settings, threadPool, version, namedWriteableRegistry);
final long seed = settings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l); |
<<<<<<<
boolean isOpen() {
=======
public void updateBuffer(ByteSizeValue bufferSize) {
config.setBufferSize(bufferSize.bytesAsInt());
try (ReleasableLock lock = writeLock.acquire()) {
current.updateBufferSize(config.getBufferSize());
}
}
/** Returns {@code true} if this {@code Translog} is still open. */
public boolean isOpen() {
>>>>>>>
/** Returns {@code true} if this {@code Translog} is still open. */
public boolean isOpen() {
<<<<<<<
newFile = TranslogWriter.create(config.getType(), shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), config.getBufferSizeBytes());
=======
newFile = TranslogWriter.create(config.getType(), shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), config.getBufferSize(), getChannelFactory());
>>>>>>>
newFile = TranslogWriter.create(config.getType(), shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), config.getBufferSizeBytes(), getChannelFactory()); |
<<<<<<<
final Map<ShardId, Long> indexBufferRAMBytesUsed = new HashMap<>();
=======
final static ByteSizeValue INACTIVE = new ByteSizeValue(-1);
final Map<IndexShard, ByteSizeValue> indexingBuffers = new HashMap<>();
final Map<IndexShard, ByteSizeValue> translogBuffers = new HashMap<>();
final Map<IndexShard, Long> lastIndexTimeNanos = new HashMap<>();
final Set<IndexShard> activeShards = new HashSet<>();
long currentTimeSec = TimeValue.timeValueNanos(System.nanoTime()).seconds();
>>>>>>>
final Map<IndexShard, Long> indexBufferRAMBytesUsed = new HashMap<>();
<<<<<<<
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "4mb").build());
final ShardId shard1 = new ShardId("test", 1);
controller.simulateIndexing(shard1);
controller.assertBuffer(shard1, new ByteSizeValue(1, ByteSizeUnit.MB));
=======
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "10mb")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "100kb").build());
IndexShard shard0 = test.getShard(0);
controller.simulateIndexing(shard0);
controller.assertBuffers(shard0, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
>>>>>>>
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "4mb").build());
IndexShard shard0 = test.getShard(0);
controller.simulateIndexing(shard0);
controller.assertBuffer(shard0, new ByteSizeValue(1, ByteSizeUnit.MB));
<<<<<<<
final ShardId shard2 = new ShardId("test", 2);
controller.simulateIndexing(shard2);
controller.assertBuffer(shard1, new ByteSizeValue(1, ByteSizeUnit.MB));
controller.assertBuffer(shard2, new ByteSizeValue(1, ByteSizeUnit.MB));
=======
IndexShard shard1 = test.getShard(1);
controller.simulateIndexing(shard1);
controller.assertBuffers(shard0, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
controller.assertBuffers(shard1, new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(50, ByteSizeUnit.KB));
>>>>>>>
IndexShard shard1 = test.getShard(1);
controller.simulateIndexing(shard1);
controller.assertBuffer(shard0, new ByteSizeValue(1, ByteSizeUnit.MB));
controller.assertBuffer(shard1, new ByteSizeValue(1, ByteSizeUnit.MB));
<<<<<<<
controller.assertBuffer(shard2, new ByteSizeValue(1, ByteSizeUnit.MB));
=======
controller.assertBuffers(shard1, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
>>>>>>>
controller.assertBuffer(shard1, new ByteSizeValue(1, ByteSizeUnit.MB));
<<<<<<<
final ShardId shard3 = new ShardId("test", 3);
controller.simulateIndexing(shard3);
controller.assertBuffer(shard3, new ByteSizeValue(1, ByteSizeUnit.MB));
=======
IndexShard shard2 = test.getShard(2);
controller.simulateIndexing(shard2);
controller.assertBuffers(shard2, new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(64, ByteSizeUnit.KB)); // translog is maxed at 64K
>>>>>>>
IndexShard shard2 = test.getShard(2);
controller.simulateIndexing(shard2);
controller.assertBuffer(shard2, new ByteSizeValue(1, ByteSizeUnit.MB));
<<<<<<<
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "5mb")
.build());
=======
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService("test");
>>>>>>>
createIndex("test", Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0).build());
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService("test");
MockController controller = new MockController(Settings.builder()
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "5mb")
.build());
IndexShard shard0 = test.getShard(0);
controller.simulateIndexing(shard0);
IndexShard shard1 = test.getShard(1);
controller.simulateIndexing(shard1);
controller.assertBuffer(shard0, new ByteSizeValue(1, ByteSizeUnit.MB));
controller.assertBuffer(shard1, new ByteSizeValue(1, ByteSizeUnit.MB));
controller.simulateIndexing(shard0);
controller.simulateIndexing(shard1);
controller.assertBuffer(shard0, new ByteSizeValue(2, ByteSizeUnit.MB));
controller.assertBuffer(shard1, new ByteSizeValue(2, ByteSizeUnit.MB));
// index into one shard only, crosses the 5mb limit, so shard0 is refreshed
controller.simulateIndexing(shard0);
controller.simulateIndexing(shard0);
controller.assertBuffer(shard0, new ByteSizeValue(0, ByteSizeUnit.MB));
controller.assertBuffer(shard1, new ByteSizeValue(2, ByteSizeUnit.MB));
controller.simulateIndexing(shard1);
controller.simulateIndexing(shard1);
controller.assertBuffer(shard1, new ByteSizeValue(4, ByteSizeUnit.MB));
controller.simulateIndexing(shard1);
controller.simulateIndexing(shard1);
// shard1 crossed 5 mb and is now cleared:
controller.assertBuffer(shard1, new ByteSizeValue(0, ByteSizeUnit.MB));
}
public void testMinBufferSizes() {
<<<<<<<
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb").build());
=======
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.TRANSLOG_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb")
.put(IndexingMemoryController.MIN_TRANSLOG_BUFFER_SIZE_SETTING, "512kb").build());
>>>>>>>
.put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "0.001%")
.put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "6mb").build()); |
<<<<<<<
import java.util.*;
import java.util.concurrent.*;
=======
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
>>>>>>>
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
<<<<<<<
// update the current cluster state
clusterState = newClusterState;
logger.debug("set local cluster state to version {}", newClusterState.version());
for (ClusterStateListener listener : preAppliedListeners) {
try {
logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version());
listener.clusterChanged(clusterChangedEvent);
} catch (Exception ex) {
logger.warn("failed to notify ClusterStateListener", ex);
}
=======
// update the current cluster state
clusterState = newClusterState;
logger.debug("set local cluster state to version {}", newClusterState.version());
for (ClusterStateListener listener : preAppliedListeners) {
try {
listener.clusterChanged(clusterChangedEvent);
} catch (Exception ex) {
logger.warn("failed to notify ClusterStateListener", ex);
>>>>>>>
// update the current cluster state
clusterState = newClusterState;
logger.debug("set local cluster state to version {}", newClusterState.version());
for (ClusterStateListener listener : preAppliedListeners) {
try {
logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version());
listener.clusterChanged(clusterChangedEvent);
} catch (Exception ex) {
logger.warn("failed to notify ClusterStateListener", ex);
<<<<<<<
for (ClusterStateListener listener : postAppliedListeners) {
try {
logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version());
listener.clusterChanged(clusterChangedEvent);
} catch (Exception ex) {
logger.warn("failed to notify ClusterStateListener", ex);
}
=======
for (ClusterStateListener listener : postAppliedListeners) {
try {
listener.clusterChanged(clusterChangedEvent);
} catch (Exception ex) {
logger.warn("failed to notify ClusterStateListener", ex);
>>>>>>>
for (ClusterStateListener listener : postAppliedListeners) {
try {
logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version());
listener.clusterChanged(clusterChangedEvent);
} catch (Exception ex) {
logger.warn("failed to notify ClusterStateListener", ex); |
<<<<<<<
=======
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.JoinUtil;
import org.apache.lucene.search.join.ScoreMode;
>>>>>>>
<<<<<<<
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.index.search.child.ScoreType;
=======
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.search.internal.SearchContext;
>>>>>>>
import org.elasticsearch.index.query.support.QueryInnerHits;
<<<<<<<
int minChildren = HasChildQueryBuilder.DEFAULT_MIN_CHILDREN;
int maxChildren = HasChildQueryBuilder.DEFAULT_MAX_CHILDREN;
int shortCircuitParentDocSet = HasChildQueryBuilder.DEFAULT_SHORT_CIRCUIT_CUTOFF;
=======
int minChildren = 0;
int maxChildren = 0;
>>>>>>>
int minChildren = HasChildQueryBuilder.DEFAULT_MIN_CHILDREN;
int maxChildren = HasChildQueryBuilder.DEFAULT_MAX_CHILDREN;
<<<<<<<
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(childType, iqb, maxChildren, minChildren, shortCircuitParentDocSet, scoreType, queryInnerHits);
hasChildQueryBuilder.queryName(queryName);
hasChildQueryBuilder.boost(boost);
return hasChildQueryBuilder;
=======
if (!queryFound) {
throw new QueryParsingException(parseContext, "[has_child] requires 'query' field");
}
if (childType == null) {
throw new QueryParsingException(parseContext, "[has_child] requires 'type' field");
}
Query innerQuery = iq.asQuery(childType);
if (innerQuery == null) {
return null;
}
innerQuery.setBoost(boost);
DocumentMapper childDocMapper = parseContext.mapperService().documentMapper(childType);
if (childDocMapper == null) {
throw new QueryParsingException(parseContext, "[has_child] No mapping for for type [" + childType + "]");
}
ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
if (parentFieldMapper.active() == false) {
throw new QueryParsingException(parseContext, "[has_child] _parent field has no parent type configured");
}
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, parseContext.mapperService(), childDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : childType;
parseContext.addInnerHits(name, parentChildInnerHits);
}
String parentType = parentFieldMapper.type();
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new QueryParsingException(parseContext, "[has_child] Type [" + childType + "] points to a non existent parent type ["
+ parentType + "]");
}
if (maxChildren > 0 && maxChildren < minChildren) {
throw new QueryParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'");
}
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());
final Query query;
final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreType, innerQuery, minChildren, maxChildren);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}
query.setBoost(boost);
return query;
}
public static Query joinUtilHelper(String parentType, ParentChildIndexFieldData parentChildIndexFieldData, Query toQuery, ScoreType scoreType, Query innerQuery, int minChildren, int maxChildren) throws IOException {
ScoreMode scoreMode;
// TODO: move entirely over from ScoreType to org.apache.lucene.join.ScoreMode, when we drop the 1.x parent child code.
switch (scoreType) {
case NONE:
scoreMode = ScoreMode.None;
break;
case MIN:
scoreMode = ScoreMode.Min;
break;
case MAX:
scoreMode = ScoreMode.Max;
break;
case SUM:
scoreMode = ScoreMode.Total;
break;
case AVG:
scoreMode = ScoreMode.Avg;
break;
default:
throw new UnsupportedOperationException("score type [" + scoreType + "] not supported");
}
// 0 in pre 2.x p/c impl means unbounded
if (maxChildren == 0) {
maxChildren = Integer.MAX_VALUE;
}
return new LateParsingQuery(toQuery, innerQuery, minChildren, maxChildren, parentType, scoreMode, parentChildIndexFieldData);
>>>>>>>
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(childType, iqb, maxChildren, minChildren, scoreType, queryInnerHits);
hasChildQueryBuilder.queryName(queryName);
hasChildQueryBuilder.boost(boost);
return hasChildQueryBuilder; |
<<<<<<<
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction;
=======
>>>>>>>
import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction;
<<<<<<<
import java.util.stream.Collectors;
=======
>>>>>>>
import java.util.function.Consumer;
<<<<<<<
RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider,
GlobalCheckpointSyncAction globalCheckpointSyncAction) {
=======
RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider) {
this(settings, (AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>>) indicesService,
clusterService, threadPool, recoveryTargetService, shardStateAction,
nodeMappingRefreshAction, repositoriesService, restoreService, searchService, syncedFlushService, recoverySource,
nodeServicesProvider);
}
// for tests
IndicesClusterStateService(Settings settings,
AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>> indicesService,
ClusterService clusterService,
ThreadPool threadPool, RecoveryTargetService recoveryTargetService,
ShardStateAction shardStateAction,
NodeMappingRefreshAction nodeMappingRefreshAction,
RepositoriesService repositoriesService, RestoreService restoreService,
SearchService searchService, SyncedFlushService syncedFlushService,
RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider) {
>>>>>>>
RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider,
GlobalCheckpointSyncAction globalCheckpointSyncAction) {
this(settings, (AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>>) indicesService,
clusterService, threadPool, recoveryTargetService, shardStateAction,
nodeMappingRefreshAction, repositoriesService, restoreService, searchService, syncedFlushService, recoverySource,
nodeServicesProvider, globalCheckpointSyncAction);
}
// for tests
IndicesClusterStateService(Settings settings,
AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>> indicesService,
ClusterService clusterService,
ThreadPool threadPool, RecoveryTargetService recoveryTargetService,
ShardStateAction shardStateAction,
NodeMappingRefreshAction nodeMappingRefreshAction,
RepositoriesService repositoriesService, RestoreService restoreService,
SearchService searchService, SyncedFlushService syncedFlushService,
RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider,
GlobalCheckpointSyncAction globalCheckpointSyncAction) {
<<<<<<<
for (ShardRouting shard : routingNode) {
if (!indicesService.hasIndex(shard.index())) {
final IndexMetaData indexMetaData = event.state().metaData().getIndexSafe(shard.index());
if (logger.isDebugEnabled()) {
logger.debug("[{}] creating index", indexMetaData.getIndex());
}
try {
indicesService.createIndex(nodeServicesProvider, indexMetaData, buildInIndexListener,
globalCheckpointSyncAction::updateCheckpointForShard);
} catch (Throwable e) {
sendFailShard(shard, "failed to create index", e);
=======
// build a map of indices to create, with the shards to fail if index creation fails
final Map<Index, List<ShardRouting>> indicesToCreate = new HashMap<>();
for (ShardRouting shardRouting : localRoutingNode) {
if (failedShardsCache.containsKey(shardRouting.shardId()) == false) {
final Index index = shardRouting.index();
if (indicesService.indexService(index) == null) {
indicesToCreate.computeIfAbsent(index, k -> new ArrayList<>()).add(shardRouting);
>>>>>>>
// build a map of indices to create, with the shards to fail if index creation fails
final Map<Index, List<ShardRouting>> indicesToCreate = new HashMap<>();
for (ShardRouting shardRouting : localRoutingNode) {
if (failedShardsCache.containsKey(shardRouting.shardId()) == false) {
final Index index = shardRouting.index();
if (indicesService.indexService(index) == null) {
indicesToCreate.computeIfAbsent(index, k -> new ArrayList<>()).add(shardRouting);
<<<<<<<
if (shardHasBeenRemoved == false) {
try {
indexShard.updateRoutingEntry(shardRouting, event.state().blocks().disableStatePersistence() == false);
if (shardRouting.primary()) {
final IndexShardRoutingTable shardRoutingTable = routingTable.shardRoutingTable(shardRouting.shardId());
Set<String> activeIds = shardRoutingTable.activeShards().stream().map(sr -> sr.allocationId().getId()).collect(Collectors.toSet());
Set<String> initializingIds = shardRoutingTable.getAllInitializingShards().stream().map(sr -> sr.allocationId().getId()).collect(Collectors.toSet());
indexShard.updateAllocationIdsFromMaster(activeIds, initializingIds);
}
} catch (Throwable e) {
failAndRemoveShard(shardRouting, indexService, true, "failed updating shard routing entry", e);
}
}
}
if (shardRouting.initializing()) {
applyInitializingShard(event.state(), indexService, shardRouting);
=======
>>>>>>> |
<<<<<<<
import org.elasticsearch.client.indexlifecycle.PutLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.DeleteLifecyclePolicyRequest;
=======
import org.elasticsearch.client.security.RefreshPolicy;
>>>>>>>
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.client.indexlifecycle.PutLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.DeleteLifecyclePolicyRequest; |
<<<<<<<
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true, true);
=======
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true);
>>>>>>>
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);
<<<<<<<
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true, true);
=======
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true);
>>>>>>>
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);
<<<<<<<
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true, true);
=======
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true);
>>>>>>>
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);
<<<<<<<
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true, true);
=======
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true);
>>>>>>>
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);
<<<<<<<
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, false, true);
=======
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, false);
>>>>>>>
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, false);
<<<<<<<
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, false, true);
=======
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, false);
>>>>>>>
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, false);
<<<<<<<
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, false, true);
=======
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, false);
>>>>>>>
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, false);
<<<<<<<
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false, true);
=======
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, false);
>>>>>>>
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false);
<<<<<<<
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false, true);
=======
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, false);
>>>>>>>
final Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false); |
<<<<<<<
Query filter = this.filter.toQuery(context.searchContext().indexShard().getQueryShardContext());
return new FilterAggregator(name, filter, factories, context, parent, pipelineAggregators, metaData);
=======
IndexSearcher contextSearcher = context.searchContext().searcher();
if (searcher != contextSearcher) {
searcher = contextSearcher;
weight = contextSearcher.createNormalizedWeight(filter, false);
}
return new FilterAggregator(name, weight, factories, context, parent, pipelineAggregators, metaData);
>>>>>>>
IndexSearcher contextSearcher = context.searchContext().searcher();
if (searcher != contextSearcher) {
searcher = contextSearcher;
Query filter = this.filter.toQuery(context.searchContext().indexShard().getQueryShardContext());
weight = contextSearcher.createNormalizedWeight(filter, false);
}
return new FilterAggregator(name, weight, factories, context, parent, pipelineAggregators, metaData); |
<<<<<<<
import org.elasticsearch.search.aggregations.ParsedAggregation;
=======
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before;
>>>>>>>
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before; |
<<<<<<<
public class SimpleQueryStringBuilder extends QueryBuilder {
public static final String NAME = "simple_query_string";
=======
public class SimpleQueryStringBuilder extends QueryBuilder implements BoostableQueryBuilder<SimpleQueryStringBuilder> {
>>>>>>>
public class SimpleQueryStringBuilder extends QueryBuilder implements BoostableQueryBuilder<SimpleQueryStringBuilder> {
public static final String NAME = "simple_query_string";
<<<<<<<
@Override
public String queryId() {
return NAME;
}
=======
>>>>>>>
@Override
public String queryId() {
return NAME;
} |
<<<<<<<
return new ParsedDocument(uidField, versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
=======
return new ParsedDocument(versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
>>>>>>>
return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
<<<<<<<
private final IndexShard reinitWithWrapper(IndexService indexService, IndexShard shard, IndexSearcherWrapper wrapper, IndexingOperationListener... listeners) throws IOException {
ShardRouting routing = new ShardRouting(shard.routingEntry());
shard.close("simon says", true);
IndexShard newShard = new IndexShard(shard.shardId(), indexService.getIndexSettings(), shard.shardPath(),
shard.store(), indexService.cache(), indexService.mapperService(), indexService.similarityService(),
indexService.fieldData(), shard.getEngineFactory(), indexService.getIndexEventListener(), wrapper,
indexService.getThreadPool(), indexService.getBigArrays(), indexService.getSearchSlowLog(), null, listeners
);
ShardRoutingHelper.reinit(routing);
=======
public static final IndexShard reinitWithWrapper(IndexService indexService, IndexShard shard, IndexSearcherWrapper wrapper, IndexingOperationListener... listeners) throws IOException {
IndexShard newShard = newIndexShard(indexService, shard, wrapper, listeners);
return recoverShard(newShard, shard.routingEntry());
}
public static final IndexShard recoverShard(IndexShard newShard, ShardRouting oldRouting) throws IOException {
ShardRouting routing = ShardRoutingHelper.reinit(oldRouting);
>>>>>>>
public static final IndexShard reinitWithWrapper(IndexService indexService, IndexShard shard, IndexSearcherWrapper wrapper, IndexingOperationListener... listeners) throws IOException {
IndexShard newShard = newIndexShard(indexService, shard, wrapper, listeners);
return recoverShard(newShard, shard.routingEntry());
}
public static final IndexShard recoverShard(IndexShard newShard, ShardRouting oldRouting) throws IOException {
ShardRouting routing = ShardRoutingHelper.reinit(oldRouting); |
<<<<<<<
for (String node : new String[]{relocatingReplicaShard.currentNodeId(), relocatingReplicaShard.relocatingNodeId()}) {
List<CapturingTransport.CapturedRequest> requests = transport.capturedRequestsByTargetNode().get(node);
=======
Map<String, List<CapturingTransport.CapturedRequest>> capturedRequestsByTargetNode = transport.getCapturedRequestsByTargetNodeAndClear();
assertPhase(task, "replicating");
for (String node : new String[] {relocatingReplicaShard.currentNodeId(), relocatingReplicaShard.relocatingNodeId()}) {
List<CapturingTransport.CapturedRequest> requests = capturedRequestsByTargetNode.get(node);
>>>>>>>
Map<String, List<CapturingTransport.CapturedRequest>> capturedRequestsByTargetNode = transport.getCapturedRequestsByTargetNodeAndClear();
assertPhase(task, "replicating");
for (String node : new String[] {relocatingReplicaShard.currentNodeId(), relocatingReplicaShard.relocatingNodeId()}) {
List<CapturingTransport.CapturedRequest> requests = capturedRequestsByTargetNode.get(node);
<<<<<<<
Releasable reference = createIndexShardReference(0);
=======
TransportReplicationAction.IndexShardReference reference = getOrCreateIndexShardOperationsCounter();
ShardRouting primaryShard = state.getRoutingTable().shardRoutingTable(shardId).primaryShard();
indexShardRouting.set(primaryShard);
>>>>>>>
TransportReplicationAction.IndexShardReference reference = getOrCreateIndexShardOperationsCounter(0);
ShardRouting primaryShard = state.getRoutingTable().shardRoutingTable(shardId).primaryShard();
indexShardRouting.set(primaryShard);
<<<<<<<
/*
* Returns testIndexShardOperationsCounter or initializes it if it was not already created in this test run.
*/
private synchronized TransportReplicationAction.IndexShardReference createIndexShardReference(long primaryTerm) {
=======
private final AtomicBoolean isRelocated = new AtomicBoolean(false);
private final AtomicBoolean isShardFailed = new AtomicBoolean();
private final AtomicReference<ShardRouting> indexShardRouting = new AtomicReference<>();
/**
* Returns testIndexShardOperationsCounter or initializes it if it was not already created in this test run.
*/
private synchronized TransportReplicationAction.IndexShardReference getOrCreateIndexShardOperationsCounter() {
>>>>>>>
private final AtomicBoolean isRelocated = new AtomicBoolean(false);
private final AtomicBoolean isShardFailed = new AtomicBoolean();
private final AtomicReference<ShardRouting> indexShardRouting = new AtomicReference<>();
/**
* Returns testIndexShardOperationsCounter or initializes it if it was not already created in this test run.
*/
private synchronized TransportReplicationAction.IndexShardReference getOrCreateIndexShardOperationsCounter(long primaryTerm) {
<<<<<<<
return new TransportReplicationAction.IndexShardReference() {
@Override
public long opPrimaryTerm() {
return primaryTerm;
}
=======
return new TransportReplicationAction.IndexShardReference() {
@Override
public boolean isRelocated() {
return isRelocated.get();
}
@Override
public void failShard(String reason, @Nullable Throwable e) {
isShardFailed.set(true);
}
@Override
public ShardRouting routingEntry() {
ShardRouting shardRouting = indexShardRouting.get();
assert shardRouting != null;
return shardRouting;
}
>>>>>>>
return new TransportReplicationAction.IndexShardReference() {
@Override
public boolean isRelocated() {
return isRelocated.get();
}
@Override
public void failShard(String reason, @Nullable Throwable e) {
isShardFailed.set(true);
}
@Override
public ShardRouting routingEntry() {
ShardRouting shardRouting = indexShardRouting.get();
assert shardRouting != null;
return shardRouting;
}
@Override
public long opPrimaryTerm() {
return primaryTerm;
} |
<<<<<<<
BooleanQuery bool = new BooleanQuery();
Query exists = ExistsQueryBuilder.newFilter(context, fieldName);
=======
BooleanQuery.Builder bool = new BooleanQuery.Builder();
Query exists = ExistsQueryParser.newFilter(parseContext, fieldName, null);
>>>>>>>
BooleanQuery.Builder bool = new BooleanQuery.Builder();
Query exists = ExistsQueryBuilder.newFilter(context, fieldName); |
<<<<<<<
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
=======
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
>>>>>>>
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
<<<<<<<
SearchSourceBuilder newBuilder = SearchSourceBuilder.parseSearchSource(parser, parseContext, aggParsers);
assertNotSame(testBuilder, newBuilder);
=======
SearchSourceBuilder newBuilder = SearchSourceBuilder.parseSearchSource(parser, parseContext);
assertNull(parser.nextToken());
>>>>>>>
SearchSourceBuilder newBuilder = SearchSourceBuilder.parseSearchSource(parser, parseContext, aggParsers);
assertNull(parser.nextToken()); |
<<<<<<<
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExpressionScriptEngineService.class,
ExpressionScriptEngineService.NAME, true));
=======
@Override
public String name() {
return "lang-expression";
}
@Override
public String description() {
return "Lucene expressions integration for Elasticsearch";
}
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new ExpressionScriptEngineService(settings);
>>>>>>>
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new ExpressionScriptEngineService(settings); |
<<<<<<<
items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test", 0), "type", String.valueOf(i), i, 1, delete));
=======
items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test", "_na_", 0), "type", String.valueOf(i), 1, delete));
>>>>>>>
items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test", "_na_", 0), "type", String.valueOf(i), i, 1, delete));
<<<<<<<
items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test-" + index, 0), "type", String.valueOf(i), i, 1, delete));
=======
items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test-" + index, "_na_", 0), "type", String.valueOf(i), 1, delete));
>>>>>>>
items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test-" + index, "_na_", 0), "type", String.valueOf(i), i, 1, delete)); |
<<<<<<<
versionMap.putUnderLock(index.uid().bytes(), new VersionValue(updatedVersion, translogLocation));
index.setTranslogLocation(translogLocation);
indexingService.postIndexUnderLock(index);
return created;
} finally {
if (index.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) {
seqNoService.markSeqNoAsCompleted(index.seqNo());
}
}
=======
versionMap.putUnderLock(index.uid().bytes(), new VersionValue(updatedVersion, translogLocation));
index.setTranslogLocation(translogLocation);
return created;
>>>>>>>
versionMap.putUnderLock(index.uid().bytes(), new VersionValue(updatedVersion, translogLocation));
index.setTranslogLocation(translogLocation);
return created;
} finally {
if (index.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) {
seqNoService.markSeqNoAsCompleted(index.seqNo());
}
}
<<<<<<<
delete.updateVersion(updatedVersion, found);
Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
versionMap.putUnderLock(delete.uid().bytes(), new DeleteVersionValue(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), translogLocation));
delete.setTranslogLocation(translogLocation);
indexingService.postDeleteUnderLock(delete);
} finally {
if (delete.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) {
seqNoService.markSeqNoAsCompleted(delete.seqNo());
}
}
=======
delete.updateVersion(updatedVersion, found);
Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
versionMap.putUnderLock(delete.uid().bytes(), new DeleteVersionValue(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), translogLocation));
delete.setTranslogLocation(translogLocation);
>>>>>>>
delete.updateVersion(updatedVersion, found);
Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
versionMap.putUnderLock(delete.uid().bytes(), new DeleteVersionValue(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), translogLocation));
delete.setTranslogLocation(translogLocation);
} finally {
if (delete.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) {
seqNoService.markSeqNoAsCompleted(delete.seqNo());
}
} |
<<<<<<<
sorts = new ArrayList<>();
sort(parser.text());
=======
builder.sort(parser.text());
} else if (context.parseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) {
builder.profile = parser.booleanValue();
>>>>>>>
sort(parser.text());
} else if (context.parseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) {
profile = parser.booleanValue();
<<<<<<<
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
highlightBuilder = xContentBuilder.bytes();
=======
builder.highlightBuilder = HighlightBuilder.PROTOTYPE.fromXContent(context);
>>>>>>>
highlightBuilder = HighlightBuilder.PROTOTYPE.fromXContent(context); |
<<<<<<<
IndexResponse indexResponse = new IndexResponse(new ShardId("index", "_na_", 0), indexRequest.type(), indexRequest.id(), 1, 1, true);
originalResponses.add(new BulkItemResponse(Integer.parseInt(indexRequest.id()), indexRequest.opType().lowercase(), indexResponse));
=======
IndexResponse indexResponse = new IndexResponse(new ShardId("index", "_na_", 0), indexRequest.type(), indexRequest.id(), 1, true);
originalResponses.add(new BulkItemResponse(Integer.parseInt(indexRequest.id()), indexRequest.opType(), indexResponse));
>>>>>>>
IndexResponse indexResponse = new IndexResponse(new ShardId("index", "_na_", 0), indexRequest.type(), indexRequest.id(), 1, 1, true);
originalResponses.add(new BulkItemResponse(Integer.parseInt(indexRequest.id()), indexRequest.opType(), indexResponse)); |
<<<<<<<
=======
protected List<Engine.Operation> generateSingleDocHistory(boolean forReplica, VersionType versionType,
boolean partialOldPrimary, long primaryTerm,
int minOpCount, int maxOpCount, String docId) {
final int numOfOps = randomIntBetween(minOpCount, maxOpCount);
final List<Engine.Operation> ops = new ArrayList<>();
final Term id = newUid(docId);
final int startWithSeqNo;
if (partialOldPrimary) {
startWithSeqNo = randomBoolean() ? numOfOps - 1 : randomIntBetween(0, numOfOps - 1);
} else {
startWithSeqNo = 0;
}
final String valuePrefix = (forReplica ? "r_" : "p_" ) + docId + "_";
final boolean incrementTermWhenIntroducingSeqNo = randomBoolean();
for (int i = 0; i < numOfOps; i++) {
final Engine.Operation op;
final long version;
switch (versionType) {
case INTERNAL:
version = forReplica ? i : Versions.MATCH_ANY;
break;
case EXTERNAL:
version = i;
break;
case EXTERNAL_GTE:
version = randomBoolean() ? Math.max(i - 1, 0) : i;
break;
case FORCE:
version = randomNonNegativeLong();
break;
default:
throw new UnsupportedOperationException("unknown version type: " + versionType);
}
if (randomBoolean()) {
op = new Engine.Index(id, testParsedDocument(docId, null, testDocumentWithTextField(valuePrefix + i), B_1, null),
forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm,
version,
forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType,
forReplica ? REPLICA : PRIMARY,
System.currentTimeMillis(), -1, false
);
} else {
op = new Engine.Delete("test", docId, id,
forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm,
version,
forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType,
forReplica ? REPLICA : PRIMARY,
System.currentTimeMillis());
}
ops.add(op);
}
return ops;
}
>>>>>>>
<<<<<<<
randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE, VersionType.FORCE), false, 2, 2, 20);
assertOpsOnReplica(ops, replicaEngine, true, logger);
=======
randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE, VersionType.FORCE), false, 2, 2, 20, "1");
assertOpsOnReplica(ops, replicaEngine, true);
>>>>>>>
randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE, VersionType.FORCE), false, 2, 2, 20, "1");
assertOpsOnReplica(ops, replicaEngine, true, logger);
<<<<<<<
final List<Engine.Operation> ops = generateSingleDocHistory(true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), true, 2, 2, 20);
assertOpsOnReplica(ops, replicaEngine, true, logger);
=======
final List<Engine.Operation> ops =
generateSingleDocHistory(true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), true, 2, 2, 20, "1");
assertOpsOnReplica(ops, replicaEngine, true);
}
}
private void assertOpsOnReplica(List<Engine.Operation> ops, InternalEngine replicaEngine, boolean shuffleOps) throws IOException {
final Engine.Operation lastOp = ops.get(ops.size() - 1);
final String lastFieldValue;
if (lastOp instanceof Engine.Index) {
Engine.Index index = (Engine.Index) lastOp;
lastFieldValue = index.docs().get(0).get("value");
} else {
// delete
lastFieldValue = null;
}
if (shuffleOps) {
int firstOpWithSeqNo = 0;
while (firstOpWithSeqNo < ops.size() && ops.get(firstOpWithSeqNo).seqNo() < 0) {
firstOpWithSeqNo++;
}
// shuffle ops but make sure legacy ops are first
shuffle(ops.subList(0, firstOpWithSeqNo), random());
shuffle(ops.subList(firstOpWithSeqNo, ops.size()), random());
}
boolean firstOp = true;
for (Engine.Operation op : ops) {
logger.info("performing [{}], v [{}], seq# [{}], term [{}]",
op.operationType().name().charAt(0), op.version(), op.seqNo(), op.primaryTerm());
if (op instanceof Engine.Index) {
Engine.IndexResult result = replicaEngine.index((Engine.Index) op);
// replicas don't really care about the creation status of documents;
// this allows ignoring the case where a document was found in the live version maps in
// a delete state and returning false for the created flag, in favor of code simplicity,
// whether it was marked as deleted or not. This check is just to signal regression so a decision can be made if it's
// intentional
assertThat(result.isCreated(), equalTo(firstOp));
assertThat(result.getVersion(), equalTo(op.version()));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
} else {
Engine.DeleteResult result = replicaEngine.delete((Engine.Delete) op);
// Replicas don't really care about the found status of documents
// this allows us to ignore the case where a document was found in the live version maps
// in a delete state and to return true for the found flag in favor of code simplicity.
// This check just signals regression so a decision can be made if it's
// intentional
assertThat(result.isFound(), equalTo(firstOp == false));
assertThat(result.getVersion(), equalTo(op.version()));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
}
if (randomBoolean()) {
engine.refresh("test");
}
if (randomBoolean()) {
engine.flush();
engine.refresh("test");
}
firstOp = false;
}
assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1);
if (lastFieldValue != null) {
try (Searcher searcher = replicaEngine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.searcher().search(new TermQuery(new Term("value", lastFieldValue)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
>>>>>>>
final List<Engine.Operation> ops = generateSingleDocHistory(true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), true, 2, 2, 20, "1");
assertOpsOnReplica(ops, replicaEngine, true, logger); |
<<<<<<<
final Engine.Index operation = indexShard.prepareIndexOnReplica(sourceToParse, request.seqNo(), request.version(), request.versionType());
=======
final Engine.Index operation = indexShard.prepareIndexOnReplica(sourceToParse, request.version(), request.versionType(), request.getAutoGeneratedTimestamp(), request.isRetry());
>>>>>>>
final Engine.Index operation = indexShard.prepareIndexOnReplica(sourceToParse, request.seqNo(), request.version(), request.versionType(), request.getAutoGeneratedTimestamp(), request.isRetry());
<<<<<<<
IndexResponse response = new IndexResponse(shardId, request.type(), request.id(), request.seqNo(), request.version(), created);
=======
IndexResponse response = new IndexResponse(shardId, request.type(), request.id(), request.version(), operation.isCreated());
>>>>>>>
IndexResponse response = new IndexResponse(shardId, request.type(), request.id(), request.seqNo(), request.version(), operation.isCreated()); |
<<<<<<<
expression.expected = rtn ? Definition.OBJECT_TYPE : expression.actual;
expression = expression.cast(variables);
=======
expression.expected = rtn ? definition.getType("Object") : expression.actual;
expression.internal = rtn;
expression = expression.cast(settings, definition, variables);
>>>>>>>
expression.expected = rtn ? Definition.OBJECT_TYPE : expression.actual;
expression.internal = rtn;
expression = expression.cast(variables); |
<<<<<<<
.setSettings(ImmutableSettings.builder().put("refresh_interval", 9999, TimeUnit.MILLISECONDS)));
=======
.setSettings(Settings.builder().put("refresh_interval", 9999)));
>>>>>>>
.setSettings(Settings.builder().put("refresh_interval", 9999, TimeUnit.MILLISECONDS)));
<<<<<<<
.setSettings(ImmutableSettings.builder().put("refresh_interval", 9999, TimeUnit.MILLISECONDS)).get();
=======
.setSettings(Settings.builder().put("refresh_interval", 9999)).get();
>>>>>>>
.setSettings(Settings.builder().put("refresh_interval", 9999, TimeUnit.MILLISECONDS)).get(); |
<<<<<<<
=======
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.CliToolTestCase;
import org.elasticsearch.common.cli.MockTerminal;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.shield.authc.esusers.FileUserRolesStore;
import org.elasticsearch.shield.authc.support.Hasher;
import org.elasticsearch.shield.authc.support.SecuredStringTests;
>>>>>>> |
<<<<<<<
=======
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
>>>>>>>
import org.elasticsearch.common.ParsingException;
<<<<<<<
public DisMaxQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException {
=======
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
>>>>>>>
public DisMaxQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { |
<<<<<<<
import org.elasticsearch.common.unit.RatioValue;
import org.elasticsearch.license.plugin.core.XPackLicenseState;
=======
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.license.XPackLicenseState;
>>>>>>>
import org.elasticsearch.license.XPackLicenseState; |
<<<<<<<
return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(),
new CommonStats(indexShard, flags), indexShard.commitStats(), indexShard.seqNoStats());
=======
return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), indexShard.commitStats());
>>>>>>>
return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(),
new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), indexShard.commitStats(), indexShard.seqNoStats()); |
<<<<<<<
addMessage(formatChangeUrl(canonicalWebUrl, u.notes.getChange(),
u.info.getSubject(), edit));
=======
addMessage(formatChangeUrl(canonicalWebUrl, u.change,
u.newCommit.getShortMessage(), edit));
>>>>>>>
String subject;
if (edit) {
try {
subject =
rp.getRevWalk().parseCommit(u.newCommitId).getShortMessage();
} catch (IOException e) {
// Log and fall back to original change subject
log.warn("failed to get subject for edit patch set", e);
subject = u.notes.getChange().getSubject();
}
} else {
subject = u.info.getMessage();
}
addMessage(formatChangeUrl(canonicalWebUrl, u.notes.getChange(),
subject, edit));
<<<<<<<
SetMultimap<ObjectId, Ref> existing = HashMultimap.create();
GroupCollector groupCollector = GroupCollector.create(changeRefsById(), db, psUtil,
notesFactory, project.getNameKey());
=======
SetMultimap<ObjectId, Ref> existing = changeRefsById();
GroupCollector groupCollector = new GroupCollector(changeRefsById(), db);
>>>>>>>
SetMultimap<ObjectId, Ref> existing = changeRefsById();
GroupCollector groupCollector = GroupCollector.create(changeRefsById(), db, psUtil,
notesFactory, project.getNameKey());
<<<<<<<
if (newCommit.equals(priorCommit)) {
// Ignore requests to make the change its current state.
skip = true;
reject(inputCommand, "commit already exists (as current patchset)");
return false;
}
=======
>>>>>>> |
<<<<<<<
=======
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.util.LocaleUtils;
>>>>>>>
import org.elasticsearch.common.inject.Inject;
<<<<<<<
=======
import org.elasticsearch.index.mapper.MappedFieldType;
>>>>>>>
<<<<<<<
public class SimpleQueryStringParser extends BaseQueryParser {
=======
public class SimpleQueryStringParser implements QueryParser {
public static final String NAME = "simple_query_string";
@Inject
public SimpleQueryStringParser() {
}
>>>>>>>
public class SimpleQueryStringParser extends BaseQueryParser {
public static final String NAME = "simple_query_string";
@Inject
public SimpleQueryStringParser() {
}
<<<<<<<
throw new QueryParsingException(parseContext,
"[" + SimpleQueryStringBuilder.NAME + "] query does not support [" + currentFieldName
+ "]");
=======
throw new QueryParsingException(parseContext, "[" + NAME + "] query does not support [" + currentFieldName + "]");
>>>>>>>
throw new QueryParsingException(parseContext, "[" + NAME + "] query does not support [" + currentFieldName + "]");
<<<<<<<
analyzerName = parser.text();
} else if ("field".equals(currentFieldName)) {
field = parser.text();
=======
analyzer = parseContext.analysisService().analyzer(parser.text());
if (analyzer == null) {
throw new QueryParsingException(parseContext, "[" + NAME + "] analyzer [" + parser.text() + "] not found");
}
>>>>>>>
analyzerName = parser.text();
<<<<<<<
// Support specifying only a field instead of a map
if (field == null) {
field = currentFieldName;
}
SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder(queryBody);
qb.boost(boost).fields(fieldsAndWeights).analyzer(analyzerName).queryName(queryName).minimumShouldMatch(minimumShouldMatch);
qb.flags(flags).defaultOperator(defaultOperator).locale(locale).lowercaseExpandedTerms(lowercaseExpandedTerms);
qb.lenient(lenient).analyzeWildcard(analyzeWildcard).boost(boost);
=======
// Use standard analyzer by default
if (analyzer == null) {
analyzer = parseContext.mapperService().searchAnalyzer();
}
if (fieldsAndWeights == null) {
fieldsAndWeights = Collections.singletonMap(parseContext.defaultField(), 1.0F);
}
SimpleQueryParser sqp = new SimpleQueryParser(analyzer, fieldsAndWeights, flags, sqsSettings);
if (defaultOperator != null) {
sqp.setDefaultOperator(defaultOperator);
}
>>>>>>>
SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder(queryBody);
qb.boost(boost).fields(fieldsAndWeights).analyzer(analyzerName).queryName(queryName).minimumShouldMatch(minimumShouldMatch);
qb.flags(flags).defaultOperator(defaultOperator).locale(locale).lowercaseExpandedTerms(lowercaseExpandedTerms);
qb.lenient(lenient).analyzeWildcard(analyzeWildcard).boost(boost); |
<<<<<<<
public static final int SERIALIZATION_FORMAT = 7;
private String id;
private String type;
private long seqNo = -1;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
private BytesReference source;
private String routing;
private String parent;
private long timestamp;
private long ttl;
public Index() {
=======
public static final int SERIALIZATION_FORMAT = 6; // since 2.0-beta1 and 1.1
private final String id;
private final String type;
private final long version;
private final VersionType versionType;
private final BytesReference source;
private final String routing;
private final String parent;
private final long timestamp;
private final long ttl;
public Index(StreamInput in) throws IOException {
final int format = in.readVInt(); // SERIALIZATION_FORMAT
assert format == SERIALIZATION_FORMAT : "format was: " + format;
id = in.readString();
type = in.readString();
source = in.readBytesReference();
routing = in.readOptionalString();
parent = in.readOptionalString();
this.version = in.readLong();
this.timestamp = in.readLong();
this.ttl = in.readLong();
this.versionType = VersionType.fromValue(in.readByte());
assert versionType.validateVersionForWrites(this.version);
>>>>>>>
public static final int SERIALIZATION_FORMAT = 7;
private String id;
private String type;
private long seqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
private BytesReference source;
private String routing;
private String parent;
private long timestamp;
private long ttl;
public Index(StreamInput in) throws IOException {
final int format = in.readVInt(); // SERIALIZATION_FORMAT
assert format >= SERIALIZATION_FORMAT - 1 : "format was: " + format;
id = in.readString();
type = in.readString();
source = in.readBytesReference();
routing = in.readOptionalString();
parent = in.readOptionalString();
this.version = in.readLong();
this.timestamp = in.readLong();
this.ttl = in.readLong();
this.versionType = VersionType.fromValue(in.readByte());
assert versionType.validateVersionForWrites(this.version);
if (format >= 7) {
seqNo = in.readVLong();
}
<<<<<<<
this.seqNo = 0;
this.version = 0;
=======
version = Versions.MATCH_ANY;
versionType = VersionType.INTERNAL;
routing = null;
parent = null;
timestamp = 0;
ttl = 0;
>>>>>>>
this.seqNo = 0;
this.version = 0;
version = Versions.MATCH_ANY;
versionType = VersionType.INTERNAL;
routing = null;
parent = null;
timestamp = 0;
ttl = 0;
<<<<<<<
public void readFrom(StreamInput in) throws IOException {
int version = in.readVInt(); // version
id = in.readString();
type = in.readString();
source = in.readBytesReference();
try {
if (version >= 1) {
if (in.readBoolean()) {
routing = in.readString();
}
}
if (version >= 2) {
if (in.readBoolean()) {
parent = in.readString();
}
}
if (version >= 3) {
this.version = in.readLong();
}
if (version >= 4) {
this.timestamp = in.readLong();
}
if (version >= 5) {
this.ttl = in.readLong();
}
if (version >= 6) {
this.versionType = VersionType.fromValue(in.readByte());
}
if (version >= 7) {
this.seqNo = in.readVLong();
}
} catch (Exception e) {
throw new ElasticsearchException("failed to read [" + type + "][" + id + "]", e);
}
assert versionType.validateVersionForWrites(version);
}
@Override
=======
>>>>>>>
<<<<<<<
this(delete.uid(), delete.seqNo(), delete.version(), delete.versionType());
=======
this.uid = delete.uid();
this.version = delete.version();
this.versionType = delete.versionType();
>>>>>>>
this(delete.uid(), delete.seqNo(), delete.version(), delete.versionType());
<<<<<<<
this(uid, 0, 0, VersionType.EXTERNAL);
=======
this(uid, Versions.MATCH_ANY, VersionType.INTERNAL);
>>>>>>>
this(uid, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL);
<<<<<<<
public void readFrom(StreamInput in) throws IOException {
int version = in.readVInt(); // version
uid = new Term(in.readString(), in.readString());
if (version >= 1) {
this.version = in.readLong();
}
if (version >= 2) {
this.versionType = VersionType.fromValue(in.readByte());
}
if (version >= 3) {
this.seqNo = in.readVLong();
}
assert versionType.validateVersionForWrites(version);
}
@Override
=======
>>>>>>> |
<<<<<<<
=======
import com.google.gerrit.server.logging.TraceContext;
import com.google.gerrit.server.logging.TraceContext.TraceTimer;
import com.google.gerrit.server.update.RefUpdateUtil;
>>>>>>>
import com.google.gerrit.server.logging.TraceContext;
import com.google.gerrit.server.logging.TraceContext.TraceTimer; |
<<<<<<<
this(copy, copy.version(), copy.primaryTerm());
}
public ShardRouting(ShardRouting copy, long version) {
this(copy, version, copy.primaryTerm());
}
public ShardRouting(ShardRouting copy, long version, long primaryTerm) {
this(copy.index(), copy.id(), copy.currentNodeId(), copy.relocatingNodeId(), copy.restoreSource(), primaryTerm, copy.primary(), copy.state(), version, copy.unassignedInfo(), copy.allocationId(), true, copy.getExpectedShardSize());
=======
this(copy.index(), copy.id(), copy.currentNodeId(), copy.relocatingNodeId(), copy.restoreSource(), copy.primary(), copy.state(), copy.unassignedInfo(), copy.allocationId(), true, copy.getExpectedShardSize());
>>>>>>>
this(copy, copy.primaryTerm());
}
public ShardRouting(ShardRouting copy, long primaryTerm) {
this(copy.index(), copy.id(), copy.currentNodeId(), copy.relocatingNodeId(), copy.restoreSource(), primaryTerm, copy.primary(), copy.state(), copy.unassignedInfo(), copy.allocationId(), true, copy.getExpectedShardSize());
<<<<<<<
ShardRouting(String index, int shardId, String currentNodeId,
String relocatingNodeId, RestoreSource restoreSource, long primaryTerm, boolean primary, ShardRoutingState state, long version,
=======
ShardRouting(Index index, int shardId, String currentNodeId,
String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state,
>>>>>>>
ShardRouting(Index index, int shardId, String currentNodeId,
String relocatingNodeId, RestoreSource restoreSource, long primaryTerm, boolean primary, ShardRoutingState state,
<<<<<<<
public static ShardRouting newUnassigned(String index, int shardId, RestoreSource restoreSource, long primaryTerm, boolean primary, UnassignedInfo unassignedInfo) {
return new ShardRouting(index, shardId, null, null, restoreSource, primaryTerm, primary, ShardRoutingState.UNASSIGNED, 0, unassignedInfo, null, true, UNAVAILABLE_EXPECTED_SHARD_SIZE);
=======
public static ShardRouting newUnassigned(Index index, int shardId, RestoreSource restoreSource, boolean primary, UnassignedInfo unassignedInfo) {
return new ShardRouting(index, shardId, null, null, restoreSource, primary, ShardRoutingState.UNASSIGNED, unassignedInfo, null, true, UNAVAILABLE_EXPECTED_SHARD_SIZE);
>>>>>>>
public static ShardRouting newUnassigned(Index index, int shardId, RestoreSource restoreSource, long primaryTerm, boolean primary, UnassignedInfo unassignedInfo) {
return new ShardRouting(index, shardId, null, null, restoreSource, primaryTerm, primary, ShardRoutingState.UNASSIGNED, unassignedInfo, null, true, UNAVAILABLE_EXPECTED_SHARD_SIZE);
<<<<<<<
* Initializes an unassigned shard on a node. If the shard is primary, its term is incremented.
=======
* Initializes an unassigned shard on a node.
*
* @param existingAllocationId allocation id to use. If null, a fresh allocation id is generated.
>>>>>>>
* Initializes an unassigned shard on a node. If the shard is primary, its term is incremented.
*
* @param existingAllocationId allocation id to use. If null, a fresh allocation id is generated.
<<<<<<<
allocationId = AllocationId.newInitializing();
if (primary) {
primaryTerm++;
}
=======
if (existingAllocationId == null) {
allocationId = AllocationId.newInitializing();
} else {
allocationId = AllocationId.newInitializing(existingAllocationId);
}
>>>>>>>
if (primary) {
primaryTerm++;
}
if (existingAllocationId == null) {
allocationId = AllocationId.newInitializing();
} else {
allocationId = AllocationId.newInitializing(existingAllocationId);
}
<<<<<<<
sb.append(", v[").append(version).append("]");
sb.append(", t[").append(primaryTerm).append("]");
=======
>>>>>>>
sb.append(", t[").append(primaryTerm).append("]"); |
<<<<<<<
Query expectedQuery = new SpanWithinQuery(new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))),
new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0))));
Query actualQuery = queryParser.parse(spanWithinQuery(spanTermQuery("age", 34), spanTermQuery("age", 35)))
.query();
=======
SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0)));
big.setBoost(2);
SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)));
little.setBoost(3);
Query expectedQuery = new SpanWithinQuery(big, little);
SpanWithinQueryBuilder spanWithinQueryBuilder = spanWithinQuery()
.big(spanTermQuery("age", 34).boost(2))
.little(spanTermQuery("age", 35).boost(3));
Query actualQuery = queryParser.parse(spanWithinQueryBuilder).query();
assertEquals(expectedQuery, actualQuery);
float boost = randomFloat();
expectedQuery.setBoost(boost);
spanWithinQueryBuilder.boost(boost);
actualQuery = queryParser.parse(spanWithinQueryBuilder).query();
>>>>>>>
SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0)));
big.setBoost(2);
SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)));
little.setBoost(3);
Query expectedQuery = new SpanWithinQuery(big, little);
SpanWithinQueryBuilder spanWithinQueryBuilder = spanWithinQuery(spanTermQuery("age", 34).boost(2), spanTermQuery("age", 35).boost(3));
Query actualQuery = queryParser.parse(spanWithinQueryBuilder).query();
assertEquals(expectedQuery, actualQuery);
float boost = randomFloat();
expectedQuery.setBoost(boost);
spanWithinQueryBuilder.boost(boost);
actualQuery = queryParser.parse(spanWithinQueryBuilder).query();
<<<<<<<
Query expectedQuery = new SpanContainingQuery(new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))),
new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0))));
Query actualQuery = queryParser.parse(spanContainingQuery(spanTermQuery("age", 34), spanTermQuery("age", 35))).query();
=======
SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0)));
big.setBoost(2);
SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)));
little.setBoost(3);
Query expectedQuery = new SpanContainingQuery(big, little);
SpanContainingQueryBuilder spanContainingQueryBuilder = spanContainingQuery()
.big(spanTermQuery("age", 34).boost(2))
.little(spanTermQuery("age", 35).boost(3));
Query actualQuery = queryParser.parse(spanContainingQueryBuilder).query();
assertEquals(expectedQuery, actualQuery);
float boost = randomFloat();
expectedQuery.setBoost(boost);
spanContainingQueryBuilder.boost(boost);
actualQuery = queryParser.parse(spanContainingQueryBuilder).query();
>>>>>>>
SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0)));
big.setBoost(2);
SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)));
little.setBoost(3);
Query expectedQuery = new SpanContainingQuery(big, little);
SpanContainingQueryBuilder spanContainingQueryBuilder = spanContainingQuery(spanTermQuery("age", 34).boost(2), spanTermQuery("age", 35).boost(3));
Query actualQuery = queryParser.parse(spanContainingQueryBuilder).query();
assertEquals(expectedQuery, actualQuery);
float boost = randomFloat();
expectedQuery.setBoost(boost);
spanContainingQueryBuilder.boost(boost);
actualQuery = queryParser.parse(spanContainingQueryBuilder).query();
<<<<<<<
=======
public void testSimpleQueryString() throws Exception {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/simple-query-string.json");
Query parsedQuery = queryParser.parse(query).query();
assertThat(parsedQuery, instanceOf(BooleanQuery.class));
}
@Test
public void testSimpleQueryStringBoost() throws Exception {
IndexQueryParserService queryParser = queryParser();
SimpleQueryStringBuilder simpleQueryStringBuilder = new SimpleQueryStringBuilder("test");
simpleQueryStringBuilder.field("body", 5);
Query parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query();
assertThat(parsedQuery, instanceOf(TermQuery.class));
assertThat(parsedQuery.getBoost(), equalTo(5f));
simpleQueryStringBuilder = new SimpleQueryStringBuilder("test");
simpleQueryStringBuilder.field("body", 5);
simpleQueryStringBuilder.boost(2);
parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query();
assertThat(parsedQuery, instanceOf(TermQuery.class));
assertThat(parsedQuery.getBoost(), equalTo(10f));
}
@Test
>>>>>>>
public void testSimpleQueryString() throws Exception {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/simple-query-string.json");
Query parsedQuery = queryParser.parse(query).query();
assertThat(parsedQuery, instanceOf(BooleanQuery.class));
}
@Test
public void testSimpleQueryStringBoost() throws Exception {
IndexQueryParserService queryParser = queryParser();
SimpleQueryStringBuilder simpleQueryStringBuilder = new SimpleQueryStringBuilder("test");
simpleQueryStringBuilder.field("body", 5);
Query parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query();
assertThat(parsedQuery, instanceOf(TermQuery.class));
assertThat(parsedQuery.getBoost(), equalTo(5f));
simpleQueryStringBuilder = new SimpleQueryStringBuilder("test");
simpleQueryStringBuilder.field("body", 5);
simpleQueryStringBuilder.boost(2);
parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query();
assertThat(parsedQuery, instanceOf(TermQuery.class));
assertThat(parsedQuery.getBoost(), equalTo(10f));
}
@Test |
<<<<<<<
@Override
public void testMustRewrite() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
super.testMustRewrite();
}
public void testRewrite() throws IOException {
FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()));
FunctionScoreQueryBuilder rewrite = (FunctionScoreQueryBuilder) functionScoreQueryBuilder.rewrite(queryShardContext());
assertNotSame(functionScoreQueryBuilder, rewrite);
assertEquals(rewrite.query(), new TermQueryBuilder("foo", "bar"));
}
public void testRewriteWithFunction() throws IOException {
TermQueryBuilder secondFunction = new TermQueryBuilder("tq", "2");
QueryBuilder queryBuilder = randomBoolean() ? new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()) : new TermQueryBuilder("foo", "bar");
FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(queryBuilder,
new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(new WrapperQueryBuilder(new TermQueryBuilder("tq", "1").toString()), new RandomScoreFunctionBuilder()),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(secondFunction, new RandomScoreFunctionBuilder())
});
FunctionScoreQueryBuilder rewrite = (FunctionScoreQueryBuilder) functionScoreQueryBuilder.rewrite(queryShardContext());
assertNotSame(functionScoreQueryBuilder, rewrite);
assertEquals(rewrite.query(), new TermQueryBuilder("foo", "bar"));
assertEquals(rewrite.filterFunctionBuilders()[0].getFilter(), new TermQueryBuilder("tq", "1"));
assertSame(rewrite.filterFunctionBuilders()[1].getFilter(), secondFunction);
}
=======
public void testQueryMalformedArrayNotSupported() throws IOException {
String json =
"{\n" +
" \"function_score\" : {\n" +
" \"not_supported\" : []\n" +
" }\n" +
"}";
try {
parseQuery(json);
fail("parse should have failed");
} catch (ParsingException e) {
assertThat(e.getMessage(), containsString("array [not_supported] is not supported"));
}
}
public void testQueryMalformedFieldNotSupported() throws IOException {
String json =
"{\n" +
" \"function_score\" : {\n" +
" \"not_supported\" : \"value\"\n" +
" }\n" +
"}";
try {
parseQuery(json);
fail("parse should have failed");
} catch (ParsingException e) {
assertThat(e.getMessage(), containsString("field [not_supported] is not supported"));
}
}
public void testMalformedQueryFunctionFieldNotSupported() throws IOException {
String json =
"{\n" +
" \"function_score\" : {\n" +
" \"functions\" : [ {\n" +
" \"not_supported\" : 23.0\n" +
" }\n" +
" }\n" +
"}";
try {
parseQuery(json);
fail("parse should have failed");
} catch (ParsingException e) {
assertThat(e.getMessage(), containsString("field [not_supported] is not supported"));
}
}
public void testMalformedQuery() throws IOException {
//verify that an error is thrown rather than setting the query twice (https://github.com/elastic/elasticsearch/issues/16583)
String json =
"{\n" +
" \"function_score\":{\n" +
" \"query\":{\n" +
" \"bool\":{\n" +
" \"must\":{\"match\":{\"field\":\"value\"}}" +
" },\n" +
" \"ignored_field_name\": {\n" +
" {\"match\":{\"field\":\"value\"}}\n" +
" }\n" +
" }\n" +
" }\n" +
" }\n" +
"}";
try {
parseQuery(json);
fail("parse should have failed");
} catch(ParsingException e) {
assertThat(e.getMessage(), containsString("[query] is already defined."));
}
}
>>>>>>>
@Override
public void testMustRewrite() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
super.testMustRewrite();
}
public void testRewrite() throws IOException {
FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()));
FunctionScoreQueryBuilder rewrite = (FunctionScoreQueryBuilder) functionScoreQueryBuilder.rewrite(queryShardContext());
assertNotSame(functionScoreQueryBuilder, rewrite);
assertEquals(rewrite.query(), new TermQueryBuilder("foo", "bar"));
}
public void testRewriteWithFunction() throws IOException {
TermQueryBuilder secondFunction = new TermQueryBuilder("tq", "2");
QueryBuilder queryBuilder = randomBoolean() ? new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()) : new TermQueryBuilder("foo", "bar");
FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(queryBuilder,
new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(new WrapperQueryBuilder(new TermQueryBuilder("tq", "1").toString()), new RandomScoreFunctionBuilder()),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(secondFunction, new RandomScoreFunctionBuilder())
});
FunctionScoreQueryBuilder rewrite = (FunctionScoreQueryBuilder) functionScoreQueryBuilder.rewrite(queryShardContext());
assertNotSame(functionScoreQueryBuilder, rewrite);
assertEquals(rewrite.query(), new TermQueryBuilder("foo", "bar"));
assertEquals(rewrite.filterFunctionBuilders()[0].getFilter(), new TermQueryBuilder("tq", "1"));
assertSame(rewrite.filterFunctionBuilders()[1].getFilter(), secondFunction);
}
public void testQueryMalformedArrayNotSupported() throws IOException {
String json =
"{\n" +
" \"function_score\" : {\n" +
" \"not_supported\" : []\n" +
" }\n" +
"}";
try {
parseQuery(json);
fail("parse should have failed");
} catch (ParsingException e) {
assertThat(e.getMessage(), containsString("array [not_supported] is not supported"));
}
}
public void testQueryMalformedFieldNotSupported() throws IOException {
String json =
"{\n" +
" \"function_score\" : {\n" +
" \"not_supported\" : \"value\"\n" +
" }\n" +
"}";
try {
parseQuery(json);
fail("parse should have failed");
} catch (ParsingException e) {
assertThat(e.getMessage(), containsString("field [not_supported] is not supported"));
}
}
public void testMalformedQueryFunctionFieldNotSupported() throws IOException {
String json =
"{\n" +
" \"function_score\" : {\n" +
" \"functions\" : [ {\n" +
" \"not_supported\" : 23.0\n" +
" }\n" +
" }\n" +
"}";
try {
parseQuery(json);
fail("parse should have failed");
} catch (ParsingException e) {
assertThat(e.getMessage(), containsString("field [not_supported] is not supported"));
}
}
public void testMalformedQuery() throws IOException {
//verify that an error is thrown rather than setting the query twice (https://github.com/elastic/elasticsearch/issues/16583)
String json =
"{\n" +
" \"function_score\":{\n" +
" \"query\":{\n" +
" \"bool\":{\n" +
" \"must\":{\"match\":{\"field\":\"value\"}}" +
" },\n" +
" \"ignored_field_name\": {\n" +
" {\"match\":{\"field\":\"value\"}}\n" +
" }\n" +
" }\n" +
" }\n" +
" }\n" +
"}";
try {
parseQuery(json);
fail("parse should have failed");
} catch(ParsingException e) {
assertThat(e.getMessage(), containsString("[query] is already defined."));
}
} |
<<<<<<<
import com.google.gerrit.index.QueryOptions;
import com.google.gerrit.index.Schema;
import com.google.gerrit.index.query.DataSource;
import com.google.gerrit.index.query.Predicate;
import com.google.gerrit.index.query.QueryParseException;
=======
import com.google.gerrit.elasticsearch.builders.QueryBuilder;
import com.google.gerrit.elasticsearch.builders.SearchSourceBuilder;
>>>>>>>
import com.google.gerrit.elasticsearch.builders.QueryBuilder;
import com.google.gerrit.elasticsearch.builders.SearchSourceBuilder;
import com.google.gerrit.index.QueryOptions;
import com.google.gerrit.index.Schema;
import com.google.gerrit.index.query.DataSource;
import com.google.gerrit.index.query.Predicate;
import com.google.gerrit.index.query.QueryParseException;
<<<<<<<
import io.searchbox.client.JestResult;
import io.searchbox.core.Bulk;
import io.searchbox.core.Bulk.Builder;
import io.searchbox.core.Search;
import io.searchbox.core.search.sort.Sort;
import io.searchbox.core.search.sort.Sort.Sorting;
=======
import com.google.inject.assistedinject.AssistedInject;
>>>>>>>
import com.google.inject.assistedinject.AssistedInject;
<<<<<<<
JestClientBuilder clientBuilder,
@Assisted Schema<InternalGroup> schema) {
super(cfg, sitePaths, schema, clientBuilder, GROUPS);
=======
ElasticRestClientBuilder clientBuilder,
@Assisted Schema<AccountGroup> schema) {
// No parts of FillArgs are currently required, just use null.
super(cfg, null, sitePaths, schema, clientBuilder, GROUPS);
>>>>>>>
ElasticRestClientBuilder clientBuilder,
@Assisted Schema<InternalGroup> schema) {
super(cfg, sitePaths, schema, clientBuilder, GROUPS);
<<<<<<<
public void replace(InternalGroup group) throws IOException {
Bulk bulk =
new Bulk.Builder()
.defaultIndex(indexName)
.defaultType(GROUPS)
.addAction(insert(GROUPS, group))
.refresh(true)
.build();
JestResult result = client.execute(bulk);
if (!result.isSucceeded()) {
=======
public void replace(AccountGroup group) throws IOException {
String bulk = toAction(GROUPS, getId(group), INDEX);
bulk += toDoc(group);
String uri = getURI(GROUPS, BULK);
Response response = performRequest(HttpPost.METHOD_NAME, bulk, uri, getRefreshParam());
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode != HttpStatus.SC_OK) {
>>>>>>>
public void replace(InternalGroup group) throws IOException {
String bulk = toAction(GROUPS, getId(group), INDEX);
bulk += toDoc(group);
String uri = getURI(GROUPS, BULK);
Response response = performRequest(HttpPost.METHOD_NAME, bulk, uri, getRefreshParam());
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode != HttpStatus.SC_OK) {
<<<<<<<
private class QuerySource implements DataSource<InternalGroup> {
private final Search search;
=======
private class QuerySource implements DataSource<AccountGroup> {
private final String search;
>>>>>>>
private class QuerySource implements DataSource<InternalGroup> {
private final String search;
<<<<<<<
List<InternalGroup> results = Collections.emptyList();
JestResult result = client.execute(search);
if (result.isSucceeded()) {
JsonObject obj = result.getJsonObject().getAsJsonObject("hits");
=======
List<AccountGroup> results = Collections.emptyList();
String uri = getURI(GROUPS, SEARCH);
Response response =
performRequest(HttpPost.METHOD_NAME, search, uri, Collections.emptyMap());
StatusLine statusLine = response.getStatusLine();
if (statusLine.getStatusCode() == HttpStatus.SC_OK) {
String content = getContent(response);
JsonObject obj =
new JsonParser().parse(content).getAsJsonObject().getAsJsonObject("hits");
>>>>>>>
List<InternalGroup> results = Collections.emptyList();
String uri = getURI(GROUPS, SEARCH);
Response response =
performRequest(HttpPost.METHOD_NAME, search, uri, Collections.emptyMap());
StatusLine statusLine = response.getStatusLine();
if (statusLine.getStatusCode() == HttpStatus.SC_OK) {
String content = getContent(response);
JsonObject obj =
new JsonParser().parse(content).getAsJsonObject().getAsJsonObject("hits");
<<<<<<<
@Override
public String toString() {
return search.toString();
}
private Optional<InternalGroup> toInternalGroup(JsonElement json) {
=======
private AccountGroup toAccountGroup(JsonElement json) {
>>>>>>>
private Optional<InternalGroup> toAccountGroup(JsonElement json) { |
<<<<<<<
public static final Setting<Boolean> WRITE_PORTS_FIELD_SETTING = Setting.boolSetting("node.portsfile", false, false, Setting.Scope.CLUSTER);
public static final Setting<Boolean> NODE_DATA_SETTING = Setting.boolSetting("node.data", true, false, Setting.Scope.CLUSTER);
public static final Setting<Boolean> NODE_MASTER_SETTING = Setting.boolSetting("node.master", true, false, Setting.Scope.CLUSTER);
public static final Setting<Boolean> NODE_LOCAL_SETTING = Setting.boolSetting("node.local", false, false, Setting.Scope.CLUSTER);
public static final Setting<String> NODE_MODE_SETTING = new Setting<>("node.mode", "network", Function.identity(), false, Setting.Scope.CLUSTER);
public static final Setting<Boolean> NODE_INGEST_SETTING = Setting.boolSetting("node.ingest", true, false, Setting.Scope.CLUSTER);
public static final Setting<String> NODE_NAME_SETTING = Setting.simpleString("node.name", false, Setting.Scope.CLUSTER);
// this sucks that folks can mistype data, master or ingest and get away with it.
=======
public static final Setting<Boolean> WRITE_PORTS_FIELD_SETTING =
Setting.boolSetting("node.portsfile", false, Property.NodeScope);
public static final Setting<Boolean> NODE_CLIENT_SETTING =
Setting.boolSetting("node.client", false, Property.NodeScope);
public static final Setting<Boolean> NODE_DATA_SETTING = Setting.boolSetting("node.data", true, Property.NodeScope);
public static final Setting<Boolean> NODE_MASTER_SETTING =
Setting.boolSetting("node.master", true, Property.NodeScope);
public static final Setting<Boolean> NODE_LOCAL_SETTING =
Setting.boolSetting("node.local", false, Property.NodeScope);
public static final Setting<String> NODE_MODE_SETTING =
new Setting<>("node.mode", "network", Function.identity(), Property.NodeScope);
public static final Setting<Boolean> NODE_INGEST_SETTING =
Setting.boolSetting("node.ingest", true, Property.NodeScope);
public static final Setting<String> NODE_NAME_SETTING = Setting.simpleString("node.name", Property.NodeScope);
// this sucks that folks can mistype client etc and get away with it.
>>>>>>>
public static final Setting<Boolean> WRITE_PORTS_FIELD_SETTING =
Setting.boolSetting("node.portsfile", false, Property.NodeScope);
public static final Setting<Boolean> NODE_CLIENT_SETTING =
Setting.boolSetting("node.client", false, Property.NodeScope);
public static final Setting<Boolean> NODE_DATA_SETTING = Setting.boolSetting("node.data", true, Property.NodeScope);
public static final Setting<Boolean> NODE_MASTER_SETTING =
Setting.boolSetting("node.master", true, Property.NodeScope);
public static final Setting<Boolean> NODE_LOCAL_SETTING =
Setting.boolSetting("node.local", false, Property.NodeScope);
public static final Setting<String> NODE_MODE_SETTING =
new Setting<>("node.mode", "network", Function.identity(), Property.NodeScope);
public static final Setting<Boolean> NODE_INGEST_SETTING =
Setting.boolSetting("node.ingest", true, Property.NodeScope);
public static final Setting<String> NODE_NAME_SETTING = Setting.simpleString("node.name", Property.NodeScope);
// this sucks that folks can mistype data, master or ingest and get away with it. |
<<<<<<<
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import org.elasticsearch.index.seqno.SequenceNumbersService;
=======
import org.elasticsearch.index.mapper.RootObjectMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.UidFieldMapper;
>>>>>>>
import org.elasticsearch.index.mapper.RootObjectMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.seqno.SequenceNumbersService;
<<<<<<<
final Engine.Index operation = new Engine.Index(newUid("test#1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, i, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime());
=======
final Engine.Index operation = new Engine.Index(newUid("test#1"), doc, i, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
final Engine.Index operation = new Engine.Index(newUid("test#1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, i, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
engine.index(new Engine.Index(newUid("3"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY,
VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY,
System.nanoTime() - engine.engineConfig.getFlushMergesAfter().nanos()));
=======
engine.index(new Engine.Index(newUid("3"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime() - engine.engineConfig.getFlushMergesAfter().nanos(), -1, false));
>>>>>>>
engine.index(new Engine.Index(newUid("3"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime() - engine.engineConfig.getFlushMergesAfter().nanos(), -1, false));
<<<<<<<
create = new Engine.Index(newUid("1"), doc, create.seqNo(), create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
=======
create = new Engine.Index(newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
>>>>>>>
create = new Engine.Index(newUid("1"), doc, create.seqNo(), create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
<<<<<<<
index = new Engine.Index(newUid("1"), doc, index.seqNo(), index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
=======
index = new Engine.Index(newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
>>>>>>>
index = new Engine.Index(newUid("1"), doc, index.seqNo(), index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
<<<<<<<
index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 13, VersionType.EXTERNAL, PRIMARY, 0);
=======
index = new Engine.Index(newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0, -1, false);
>>>>>>>
index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 13, VersionType.EXTERNAL, PRIMARY, 0, -1, false);
<<<<<<<
Engine.Index create = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0);
=======
Engine.Index create = new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false);
>>>>>>>
Engine.Index create = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false);
<<<<<<<
create = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0);
=======
create = new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false);
>>>>>>>
create = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false);
<<<<<<<
index = new Engine.Index(newUid("1"), doc, index.seqNo(), index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
=======
index = new Engine.Index(newUid("1"), doc, index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
>>>>>>>
index = new Engine.Index(newUid("1"), doc, index.seqNo(), index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
<<<<<<<
index = new Engine.Index(newUid("1"), doc, index.seqNo(), 1L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
=======
index = new Engine.Index(newUid("1"), doc, 1L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
>>>>>>>
index = new Engine.Index(newUid("1"), doc, index.seqNo(), 1L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
<<<<<<<
index = new Engine.Index(newUid("1"), doc, index.seqNo(), 2L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
=======
index = new Engine.Index(newUid("1"), doc, 2L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
>>>>>>>
index = new Engine.Index(newUid("1"), doc, index.seqNo(), 2L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
<<<<<<<
index = new Engine.Index(newUid("1"), doc, index.seqNo(), 1L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
=======
index = new Engine.Index(newUid("1"), doc, 1L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
>>>>>>>
index = new Engine.Index(newUid("1"), doc, index.seqNo(), 1L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
<<<<<<<
index = new Engine.Index(newUid("1"), doc, index.seqNo(), 2L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
=======
index = new Engine.Index(newUid("1"), doc, 2L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
>>>>>>>
index = new Engine.Index(newUid("1"), doc, index.seqNo(), 2L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false);
<<<<<<<
engine.index(new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
=======
engine.index(new Engine.Index(newUid("1"), doc, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false));
>>>>>>>
engine.index(new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false));
<<<<<<<
engine.index(new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
=======
engine.index(new Engine.Index(newUid("1"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false));
>>>>>>>
engine.index(new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false));
<<<<<<<
engine.index(new Engine.Index(newUid("2"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
=======
engine.index(new Engine.Index(newUid("2"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false));
>>>>>>>
engine.index(new Engine.Index(newUid("2"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false));
<<<<<<<
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, SequenceNumbersService.UNASSIGNED_SEQ_NO, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
=======
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
=======
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
=======
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
=======
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
Engine.Index firstIndexRequest = new Engine.Index(newUid(uuidValue), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 1, VersionType.EXTERNAL, PRIMARY, System.nanoTime());
=======
Engine.Index firstIndexRequest = new Engine.Index(newUid(uuidValue), doc, 1, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
Engine.Index firstIndexRequest = new Engine.Index(newUid(uuidValue), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 1, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
Engine.Index idxRequest = new Engine.Index(newUid(uuidValue), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2, VersionType.EXTERNAL, PRIMARY, System.nanoTime());
=======
Engine.Index idxRequest = new Engine.Index(newUid(uuidValue), doc, 2, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
Engine.Index idxRequest = new Engine.Index(newUid(uuidValue), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
=======
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
=======
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime());
=======
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
<<<<<<<
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime());
=======
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false);
>>>>>>>
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); |
<<<<<<<
public MetaDataIndexAliasesService(
Settings settings,
ClusterService clusterService,
IndicesService indicesService,
AliasValidator aliasValidator,
NodeServicesProvider nodeServicesProvider,
MetaDataDeleteIndexService deleteIndexService) {
=======
public MetaDataIndexAliasesService(Settings settings, ClusterService clusterService, IndicesService indicesService,
AliasValidator aliasValidator, MetaDataDeleteIndexService deleteIndexService) {
>>>>>>>
public MetaDataIndexAliasesService(
Settings settings,
ClusterService clusterService,
IndicesService indicesService,
AliasValidator aliasValidator,
MetaDataDeleteIndexService deleteIndexService) {
<<<<<<<
indexService = indicesService.createIndex(nodeServicesProvider, index, emptyList(), shardId -> {});
=======
indexService = indicesService.createIndex(index, emptyList());
>>>>>>>
indexService = indicesService.createIndex(index, emptyList(), shardId -> {}); |
<<<<<<<
void analyze(Variables variables) {
expression.expected = Definition.OBJECT_TYPE;
expression.analyze(variables);
expression = expression.cast(variables);
=======
void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
expression.expected = definition.getType("Object");
expression.internal = true;
expression.analyze(settings, definition, variables);
expression = expression.cast(settings, definition, variables);
>>>>>>>
void analyze(Variables variables) {
expression.expected = Definition.OBJECT_TYPE;
expression.internal = true;
expression.analyze(variables);
expression = expression.cast(variables); |
<<<<<<<
=======
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.HppcMaps;
>>>>>>>
import org.elasticsearch.common.bytes.BytesArray; |
<<<<<<<
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ExecutionException;
=======
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
>>>>>>>
import java.lang.reflect.Proxy;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.ExecutionException; |
<<<<<<<
private final Index index;
//norelease this flag is also used in the QueryShardContext; we need to make sure we set it there correctly in doToQuery()
private ParseFieldMatcher parseFieldMatcher;
=======
private ParseFieldMatcher parseFieldMatcher = ParseFieldMatcher.EMPTY;
>>>>>>>
private final Index index;
//norelease this flag is also used in the QueryShardContext; we need to make sure we set it there correctly in doToQuery()
private ParseFieldMatcher parseFieldMatcher = ParseFieldMatcher.EMPTY;
<<<<<<<
this.indicesQueriesRegistry = registry;
this.shardContext = null;
=======
this.indexVersionCreated = Version.indexCreated(indexQueryParser.indexSettings());
this.indexQueryParser = indexQueryParser;
}
public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) {
if (parseFieldMatcher == null) {
throw new IllegalArgumentException("parseFieldMatcher must not be null");
}
this.parseFieldMatcher = parseFieldMatcher;
>>>>>>>
this.indicesQueriesRegistry = registry;
this.shardContext = null;
<<<<<<<
=======
if (parser != null) {
this.parser.setParseFieldMatcher(parseFieldMatcher);
}
this.namedQueries.clear();
this.nestedScope = new NestedScope();
this.isFilter = false;
}
public Index index() {
return this.index;
>>>>>>>
if (parser != null) {
this.parser.setParseFieldMatcher(parseFieldMatcher);
} |
<<<<<<<
public static SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
XContentParser.Token token;
String fieldName = null;
SmoothingModel model = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseFieldMatcher.match(fieldName, LinearInterpolation.PARSE_FIELD)) {
model = LinearInterpolation.PROTOTYPE.innerFromXContent(parseContext);
} else if (parseFieldMatcher.match(fieldName, Laplace.PARSE_FIELD)) {
model = Laplace.PROTOTYPE.innerFromXContent(parseContext);
} else if (parseFieldMatcher.match(fieldName, StupidBackoff.PARSE_FIELD)) {
model = StupidBackoff.PROTOTYPE.innerFromXContent(parseContext);
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[smoothing] unknown token [" + token + "] after [" + fieldName + "]");
}
}
return model;
}
public abstract SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException;
=======
@Override
public final int hashCode() {
/*
* Override hashCode here and forward to an abstract method to force extensions of this class to override hashCode in the same
* way that we force them to override equals. This also prevents false positives in CheckStyle's EqualsHashCode check.
*/
return doHashCode();
}
public abstract SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException;
>>>>>>>
public static SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
XContentParser.Token token;
String fieldName = null;
SmoothingModel model = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseFieldMatcher.match(fieldName, LinearInterpolation.PARSE_FIELD)) {
model = LinearInterpolation.PROTOTYPE.innerFromXContent(parseContext);
} else if (parseFieldMatcher.match(fieldName, Laplace.PARSE_FIELD)) {
model = Laplace.PROTOTYPE.innerFromXContent(parseContext);
} else if (parseFieldMatcher.match(fieldName, StupidBackoff.PARSE_FIELD)) {
model = StupidBackoff.PROTOTYPE.innerFromXContent(parseContext);
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[smoothing] unknown token [" + token + "] after [" + fieldName + "]");
}
}
return model;
}
public abstract SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException;
@Override
public final int hashCode() {
/*
* Override hashCode here and forward to an abstract method to force extensions of this class to override hashCode in the same
* way that we force them to override equals. This also prevents false positives in CheckStyle's EqualsHashCode check.
*/
return doHashCode();
} |
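The resolution above keeps both sides whole: the static `fromXContent` dispatcher from one branch and the `hashCode`-forwarding idiom from the other. As a minimal, standalone sketch of that idiom (generic names, not Elasticsearch's `SmoothingModel` itself):

```java
// Sketch of the final-hashCode-forwarding pattern the resolution preserves.
public abstract class Model {
    @Override
    public final int hashCode() {
        // Forwarding to an abstract method forces every subclass to override
        // hashCode alongside equals, and avoids false positives in
        // CheckStyle's EqualsHashCode check.
        return doHashCode();
    }

    @Override
    public final boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return doEquals(obj);
    }

    protected abstract int doHashCode();

    protected abstract boolean doEquals(Object other);
}
```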
<<<<<<<
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, 0, primaryNode, relocatingNode, null, primaryTerm,
true, primaryState, 0, unassignedInfo));
=======
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, 0, primaryNode, relocatingNode, null, true, primaryState, unassignedInfo));
>>>>>>>
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, 0, primaryNode, relocatingNode, null, primaryTerm, true,
primaryState, unassignedInfo));
<<<<<<<
TestShardRouting.newShardRouting(index, shardId.id(), replicaNode, relocatingNode, null, primaryTerm, false,
replicaState, 0, unassignedInfo));
=======
TestShardRouting.newShardRouting(index, shardId.id(), replicaNode, relocatingNode, null, false, replicaState, unassignedInfo));
>>>>>>>
TestShardRouting.newShardRouting(index, shardId.id(), replicaNode, relocatingNode, null, primaryTerm, false, replicaState, unassignedInfo));
<<<<<<<
IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index);
final int primaryTerm = randomInt(200);
=======
IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(indexMetaData.getIndex());
>>>>>>>
IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(indexMetaData.getIndex());
final int primaryTerm = randomInt(200);
<<<<<<<
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(0).id(), null, null, primaryTerm, true,
ShardRoutingState.STARTED, 0, null));
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(1).id(), null, null, primaryTerm, false,
ShardRoutingState.STARTED, 0, null));
=======
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(0).id(), null, null, true, ShardRoutingState.STARTED, null));
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(1).id(), null, null, false, ShardRoutingState.STARTED, null));
>>>>>>>
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(0).id(), null, null, primaryTerm, true,
ShardRoutingState.STARTED, null));
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(1).id(), null, null, primaryTerm, false,
ShardRoutingState.STARTED, null)); |
<<<<<<<
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("_index".equals(currentFieldName)) {
if (!allowExplicitIndex) {
throw new IllegalArgumentException("explicit index in bulk is not allowed");
}
index = parser.text();
} else if ("_type".equals(currentFieldName)) {
type = parser.text();
} else if ("_id".equals(currentFieldName)) {
id = parser.text();
} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
routing = parser.text();
} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
parent = parser.text();
} else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) {
timestamp = parser.text();
} else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) {
if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
ttl = TimeValue.parseTimeValue(parser.text(), null, currentFieldName).millis();
=======
if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("_index".equals(currentFieldName)) {
if (!allowExplicitIndex) {
throw new IllegalArgumentException("explicit index in bulk is not allowed");
}
index = parser.text();
} else if ("_type".equals(currentFieldName)) {
type = parser.text();
} else if ("_id".equals(currentFieldName)) {
id = parser.text();
} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
routing = parser.text();
} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
parent = parser.text();
} else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) {
timestamp = parser.text();
} else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) {
if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
ttl = TimeValue.parseTimeValue(parser.text(), null).millis();
} else {
ttl = parser.longValue();
}
} else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) {
opType = parser.text();
} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
version = parser.longValue();
} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
versionType = VersionType.fromString(parser.text());
} else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) {
retryOnConflict = parser.intValue();
>>>>>>>
if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("_index".equals(currentFieldName)) {
if (!allowExplicitIndex) {
throw new IllegalArgumentException("explicit index in bulk is not allowed");
}
index = parser.text();
} else if ("_type".equals(currentFieldName)) {
type = parser.text();
} else if ("_id".equals(currentFieldName)) {
id = parser.text();
} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
routing = parser.text();
} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
parent = parser.text();
} else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) {
timestamp = parser.text();
} else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) {
if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
ttl = TimeValue.parseTimeValue(parser.text(), null, currentFieldName).millis();
} else {
ttl = parser.longValue();
}
} else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) {
opType = parser.text();
} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
version = parser.longValue();
} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
versionType = VersionType.fromString(parser.text());
} else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) {
retryOnConflict = parser.intValue(); |
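Both branches in this record share the same streaming field-dispatch loop; the resolution keeps the `START_OBJECT` guard from one side and the three-argument `parseTimeValue` overload from the other. For readers unfamiliar with the idiom, here is a hedged sketch of the same loop using Jackson's `JsonParser` in place of Elasticsearch's `XContentParser` (the field names mirror the record; everything else is illustrative):

```java
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

public class TokenLoopSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"_index\":\"logs\",\"_type\":\"doc\",\"_id\":\"1\"}";
        JsonParser parser = new JsonFactory().createParser(json);
        String index = null, type = null, id = null;
        // The same START_OBJECT guard the resolved code keeps.
        if (parser.nextToken() == JsonToken.START_OBJECT) {
            String currentFieldName = null;
            JsonToken token;
            while ((token = parser.nextToken()) != JsonToken.END_OBJECT) {
                if (token == JsonToken.FIELD_NAME) {
                    currentFieldName = parser.getCurrentName();
                } else if (token.isScalarValue()) {
                    // Dispatch on the remembered field name, value token in hand.
                    if ("_index".equals(currentFieldName)) {
                        index = parser.getText();
                    } else if ("_type".equals(currentFieldName)) {
                        type = parser.getText();
                    } else if ("_id".equals(currentFieldName)) {
                        id = parser.getText();
                    }
                }
            }
        }
        System.out.println(index + "/" + type + "/" + id);
    }
}
```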
<<<<<<<
public void onModule(SettingsModule settingsModule) {
settingsModule.registerSetting(AzureComputeService.Management.ENDPOINT_SETTING);
=======
@Override
public String name() {
return AzureDiscoveryClusterFormationTests.class.getName();
}
@Override
public String description() {
return AzureDiscoveryClusterFormationTests.class.getName();
}
@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(AzureComputeService.Management.ENDPOINT_SETTING);
>>>>>>>
@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(AzureComputeService.Management.ENDPOINT_SETTING); |
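In this record and the mapper-attachments one further below, the resolution replaces imperative registration via `onModule(SettingsModule)` with a declarative `getSettings()` override that the plugin framework collects, dropping the now-redundant `name()`/`description()` overrides. Sketched generically, with a hypothetical `Plugin` interface standing in for the framework types:

```java
import java.util.Arrays;
import java.util.List;

public class PluginSettingsSketch {
    // Hypothetical stand-ins; only the registration shape mirrors the record.
    interface Setting<T> {}

    interface Plugin {
        // Declarative: the framework pulls the settings it should register,
        // instead of the plugin pushing them into a module callback.
        default List<Setting<?>> getSettings() {
            return Arrays.asList();
        }
    }

    static final Setting<String> ENDPOINT_SETTING = new Setting<String>() {};

    static class DiscoveryPlugin implements Plugin {
        @Override
        public List<Setting<?>> getSettings() {
            return Arrays.asList(ENDPOINT_SETTING);
        }
    }
}
```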
<<<<<<<
import com.amazonaws.services.s3.AmazonS3;
=======
import com.amazonaws.ClientConfiguration;
>>>>>>>
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.ClientConfiguration;
<<<<<<<
// If the user defined a path style access setting, we rely on it otherwise we use the default
// value set by the SDK
Boolean pathStyleAccess = null;
if (Repository.PATH_STYLE_ACCESS_SETTING.exists(repositorySettings.settings()) ||
Repositories.PATH_STYLE_ACCESS_SETTING.exists(repositorySettings.globalSettings())) {
pathStyleAccess = getValue(repositorySettings, Repository.PATH_STYLE_ACCESS_SETTING, Repositories.PATH_STYLE_ACCESS_SETTING);
}
logger.debug("using bucket [{}], region [{}], endpoint [{}], protocol [{}], chunk_size [{}], server_side_encryption [{}], buffer_size [{}], max_retries [{}], canned_acl [{}], storage_class [{}], path_style_access [{}]",
bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries, cannedACL, storageClass,
pathStyleAccess);
=======
logger.debug("using bucket [{}], region [{}], endpoint [{}], protocol [{}], chunk_size [{}], server_side_encryption [{}], " +
"buffer_size [{}], max_retries [{}], use_throttle_retries [{}], cannedACL [{}], storageClass [{}]",
bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries, useThrottleRetries, cannedACL,
storageClass);
>>>>>>>
// If the user defined a path style access setting, we rely on it otherwise we use the default
// value set by the SDK
Boolean pathStyleAccess = null;
if (Repository.PATH_STYLE_ACCESS_SETTING.exists(repositorySettings.settings()) ||
Repositories.PATH_STYLE_ACCESS_SETTING.exists(repositorySettings.globalSettings())) {
pathStyleAccess = getValue(repositorySettings, Repository.PATH_STYLE_ACCESS_SETTING, Repositories.PATH_STYLE_ACCESS_SETTING);
}
logger.debug("using bucket [{}], region [{}], endpoint [{}], protocol [{}], chunk_size [{}], server_side_encryption [{}], " +
"buffer_size [{}], max_retries [{}], use_throttle_retries [{}], cannedACL [{}], storageClass [{}], path_style_access [{}]",
bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries, useThrottleRetries, cannedACL,
storageClass, pathStyleAccess);
<<<<<<<
blobStore = new S3BlobStore(settings, s3Service.client(endpoint, protocol, region, key, secret, maxRetries, pathStyleAccess),
=======
blobStore = new S3BlobStore(settings, s3Service.client(endpoint, protocol, region, key, secret, maxRetries, useThrottleRetries),
>>>>>>>
blobStore = new S3BlobStore(settings,
s3Service.client(endpoint, protocol, region, key, secret, maxRetries, useThrottleRetries, pathStyleAccess), |
<<<<<<<
public void onModule(SettingsModule settingsModule) {
=======
@Override
public String name() {
return "mapper-attachments";
}
@Override
public String description() {
return "Adds the attachment type allowing to parse difference attachment formats";
}
@Override
public List<Setting<?>> getSettings() {
>>>>>>>
@Override
public List<Setting<?>> getSettings() { |
<<<<<<<
batchRefUpdate = repoView.getRepository().getRefDatabase().newBatchUpdate();
batchRefUpdate.setPushCertificate(pushCert);
batchRefUpdate.setRefLogMessage(refLogMessage, true);
batchRefUpdate.setAllowNonFastForwards(true);
repoView.getCommands().addTo(batchRefUpdate);
=======
batchRefUpdate = repo.getRefDatabase().newBatchUpdate();
batchRefUpdate.setRefLogMessage(refLogMessage, true);
if (user.isIdentifiedUser()) {
batchRefUpdate.setRefLogIdent(user.asIdentifiedUser().newRefLogIdent(when, tz));
}
commands.addTo(batchRefUpdate);
>>>>>>>
batchRefUpdate = repoView.getRepository().getRefDatabase().newBatchUpdate();
batchRefUpdate.setPushCertificate(pushCert);
batchRefUpdate.setRefLogMessage(refLogMessage, true);
batchRefUpdate.setAllowNonFastForwards(true);
repoView.getCommands().addTo(batchRefUpdate);
if (user.isIdentifiedUser()) {
batchRefUpdate.setRefLogIdent(user.asIdentifiedUser().newRefLogIdent(when, tz));
} |
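The merged Gerrit code takes the `repoView`/`pushCert` wiring from one branch and the reflog-identity block from the other. Stripped of the Gerrit-specific wrappers (`repoView`, `pushCert`, and `commands` are project types), the underlying JGit sequence looks roughly like this:

```java
import org.eclipse.jgit.lib.BatchRefUpdate;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevWalk;

public class BatchUpdateSketch {
    // Rough JGit-only sketch of the resolved sequence; the ReceiveCommands
    // and push certificate that Gerrit attaches are omitted here.
    static void apply(Repository repo, String refLogMessage) throws Exception {
        BatchRefUpdate batchRefUpdate = repo.getRefDatabase().newBatchUpdate();
        batchRefUpdate.setRefLogMessage(refLogMessage, true); // true appends the result status
        batchRefUpdate.setAllowNonFastForwards(true);
        // Gerrit adds its queued ReceiveCommands here before executing.
        try (RevWalk rw = new RevWalk(repo)) {
            batchRefUpdate.execute(rw, NullProgressMonitor.INSTANCE);
        }
    }
}
```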
<<<<<<<
private volatile Map<Integer, IndexShardInjectorPair> shards = emptyMap();
private static class IndexShardInjectorPair {
private final IndexShard indexShard;
private final Injector injector;
public IndexShardInjectorPair(IndexShard indexShard, Injector injector) {
this.indexShard = indexShard;
this.injector = injector;
}
public IndexShard getIndexShard() {
return indexShard;
}
public Injector getInjector() {
return injector;
}
}
=======
private final IndexServicesProvider indexServicesProvider;
private final IndexStore indexStore;
private volatile ImmutableMap<Integer, IndexShard> shards = ImmutableMap.of();
>>>>>>>
private final IndexServicesProvider indexServicesProvider;
private final IndexStore indexStore;
private volatile Map<Integer, IndexShard> shards = emptyMap();
<<<<<<<
HashMap<Integer, IndexShardInjectorPair> newShards = new HashMap<>(shards);
IndexShardInjectorPair indexShardInjectorPair = newShards.remove(shardId);
indexShard = indexShardInjectorPair.getIndexShard();
shardInjector = indexShardInjectorPair.getInjector();
shards = unmodifiableMap(newShards);
closeShardInjector(reason, sId, shardInjector, indexShard);
=======
HashMap<Integer, IndexShard> tmpShardsMap = new HashMap<>(shards);
indexShard = tmpShardsMap.remove(shardId);
shards = ImmutableMap.copyOf(tmpShardsMap);
closeShard(reason, sId, indexShard, indexShard.store());
>>>>>>>
HashMap<Integer, IndexShard> newShards = new HashMap<>(shards);
indexShard = newShards.remove(shardId);
shards = unmodifiableMap(newShards);
closeShard(reason, sId, indexShard, indexShard.store()); |
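Both conflicts in this record converge on the same copy-on-write idiom for the volatile shard map: copy the published map, mutate the private copy, then republish it read-only. A self-contained sketch of the idiom with generic names (not the `IndexService` code itself):

```java
import java.util.HashMap;
import java.util.Map;

import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;

public class CopyOnWriteMapSketch<K, V> {
    // Readers see an immutable snapshot; writers replace the map wholesale.
    private volatile Map<K, V> entries = emptyMap();

    public synchronized V remove(K key) {
        HashMap<K, V> copy = new HashMap<>(entries); // mutate a private copy
        V removed = copy.remove(key);
        entries = unmodifiableMap(copy); // publish atomically via the volatile write
        return removed;
    }

    public V get(K key) {
        return entries.get(key); // lock-free read of the current snapshot
    }
}
```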
<<<<<<<
public static final Setting<TimeValue> COMMIT_TIMEOUT_SETTING =
new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s),
(s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"),
SettingsProperty.Dynamic, SettingsProperty.ClusterScope);
public static final Setting<ClusterBlock> NO_MASTER_BLOCK_SETTING =
new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock,
SettingsProperty.Dynamic, SettingsProperty.ClusterScope);
public static final Setting<Boolean> PUBLISH_DIFF_ENABLE_SETTING =
Setting.boolSetting("discovery.zen.publish_diff.enable", true, SettingsProperty.Dynamic, SettingsProperty.ClusterScope);
=======
public static final Setting<TimeValue> COMMIT_TIMEOUT_SETTING = new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s), (s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"), true, Setting.Scope.CLUSTER);
public static final Setting<ClusterBlock> NO_MASTER_BLOCK_SETTING = new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock, true, Setting.Scope.CLUSTER);
public static final Setting<Boolean> PUBLISH_DIFF_ENABLE_SETTING = Setting.boolSetting("discovery.zen.publish_diff.enable", true, true, Setting.Scope.CLUSTER);
public static final Setting<TimeValue> INITIAL_STATE_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), false, Setting.Scope.CLUSTER);
>>>>>>>
public static final Setting<TimeValue> COMMIT_TIMEOUT_SETTING =
new Setting<>("discovery.zen.commit_timeout", (s) -> PUBLISH_TIMEOUT_SETTING.getRaw(s),
(s) -> TimeValue.parseTimeValue(s, TimeValue.timeValueSeconds(30), "discovery.zen.commit_timeout"),
SettingsProperty.Dynamic, SettingsProperty.ClusterScope);
public static final Setting<ClusterBlock> NO_MASTER_BLOCK_SETTING =
new Setting<>("discovery.zen.no_master_block", "write", DiscoverySettings::parseNoMasterBlock,
SettingsProperty.Dynamic, SettingsProperty.ClusterScope);
public static final Setting<Boolean> PUBLISH_DIFF_ENABLE_SETTING =
Setting.boolSetting("discovery.zen.publish_diff.enable", true, SettingsProperty.Dynamic, SettingsProperty.ClusterScope);
public static final Setting<TimeValue> INITIAL_STATE_TIMEOUT_SETTING =
Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), SettingsProperty.ClusterScope); |
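The resolution here tracks an API migration: positional `true, Setting.Scope.CLUSTER` arguments become self-describing `SettingsProperty` varargs, and the long one-liners are wrapped. A simplified model of why the varargs-enum form is preferable (this is not Elasticsearch's `Setting` class, just the shape of the change):

```java
import java.util.EnumSet;
import java.util.function.Function;

public class SettingSketch<T> {
    enum Property { Dynamic, ClusterScope, IndexScope }

    private final String key;
    private final Function<String, T> parser;
    private final EnumSet<Property> properties;

    // Call sites now read as "Dynamic, ClusterScope" instead of an
    // opaque positional "true, CLUSTER".
    SettingSketch(String key, Function<String, T> parser, Property... properties) {
        this.key = key;
        this.parser = parser;
        this.properties = properties.length == 0
                ? EnumSet.noneOf(Property.class)
                : EnumSet.of(properties[0], properties);
    }

    boolean isDynamic() {
        return properties.contains(Property.Dynamic);
    }

    T parse(String raw) {
        return parser.apply(raw);
    }

    String key() {
        return key;
    }
}
```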
<<<<<<<
IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1));
final int primaryTerm = randomInt(200);
=======
IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1));
>>>>>>>
final int primaryTerm = randomInt(200);
IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1));
<<<<<<<
routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", null, null, primaryTerm, j == 0, state, 0, unassignedInfo));
=======
routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", null, null, j == 0, state, unassignedInfo));
>>>>>>>
routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", null, null, primaryTerm, j == 0, state, unassignedInfo));
<<<<<<<
IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1));
final int primaryTerm = randomInt(200);
=======
IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1));
>>>>>>>
final int primaryTerm = randomInt(200);
IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); |
<<<<<<<
private SearchResponse minMaxQuery(ScoreType scoreType, int minChildren, int maxChildren, int cutoff) throws SearchPhaseExecutionException {
=======
private SearchResponse minMaxQuery(String scoreType, int minChildren, int maxChildren) throws SearchPhaseExecutionException {
>>>>>>>
private SearchResponse minMaxQuery(ScoreType scoreType, int minChildren, int maxChildren) throws SearchPhaseExecutionException {
<<<<<<<
.minChildren(minChildren).maxChildren(maxChildren).shortCircuitCutoff(cutoff))
=======
.minChildren(minChildren).maxChildren(maxChildren))
>>>>>>>
.minChildren(minChildren).maxChildren(maxChildren))
<<<<<<<
.minChildren(minChildren).maxChildren(maxChildren).shortCircuitCutoff(cutoff)))
=======
.minChildren(minChildren).maxChildren(maxChildren)))
>>>>>>>
.minChildren(minChildren).maxChildren(maxChildren)))
<<<<<<<
response = minMaxQuery(ScoreType.SUM, 3, 0, cutoff);
=======
response = minMaxQuery("sum", 3, 0);
>>>>>>>
response = minMaxQuery(ScoreType.SUM, 3, 0);
<<<<<<<
response = minMaxQuery(ScoreType.SUM, 2, 2, cutoff);
=======
response = minMaxQuery("sum", 2, 2);
>>>>>>>
response = minMaxQuery(ScoreType.SUM, 2, 2);
<<<<<<<
response = minMaxQuery(ScoreType.SUM, 3, 2, cutoff);
=======
response = minMaxQuery("sum", 3, 2);
>>>>>>>
response = minMaxQuery(ScoreType.SUM, 3, 2);
<<<<<<<
response = minMaxQuery(ScoreType.MAX, 3, 0, cutoff);
=======
response = minMaxQuery("max", 3, 0);
>>>>>>>
response = minMaxQuery(ScoreType.MAX, 3, 0);
<<<<<<<
response = minMaxQuery(ScoreType.MAX, 2, 2, cutoff);
=======
response = minMaxQuery("max", 2, 2);
>>>>>>>
response = minMaxQuery(ScoreType.MAX, 2, 2);
<<<<<<<
response = minMaxQuery(ScoreType.MAX, 3, 2, cutoff);
=======
response = minMaxQuery("max", 3, 2);
>>>>>>>
response = minMaxQuery(ScoreType.MAX, 3, 2);
<<<<<<<
response = minMaxQuery(ScoreType.AVG, 3, 0, cutoff);
=======
response = minMaxQuery("avg", 3, 0);
>>>>>>>
response = minMaxQuery(ScoreType.AVG, 3, 0);
<<<<<<<
response = minMaxQuery(ScoreType.AVG, 2, 2, cutoff);
=======
response = minMaxQuery("avg", 2, 2);
>>>>>>>
response = minMaxQuery(ScoreType.AVG, 2, 2);
<<<<<<<
response = minMaxQuery(ScoreType.AVG, 3, 2, cutoff);
=======
response = minMaxQuery("avg", 3, 2);
>>>>>>>
response = minMaxQuery(ScoreType.AVG, 3, 2);
<<<<<<<
hasChildQueryBuilder.shortCircuitCutoff(randomInt(10));
=======
>>>>>>> |
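This final record tracks two parallel API changes at once: the `cutoff`/`shortCircuitCutoff` parameter is removed outright, and the stringly-typed score mode becomes a `ScoreType` enum. The enum half of the migration, sketched on its own:

```java
public class ScoreTypeSketch {
    // Replacing "sum"/"max"/"avg" strings with an enum moves typo detection
    // from runtime parsing to the compiler, which the resolved tests rely on.
    enum ScoreType {
        SUM, MAX, AVG;

        static ScoreType fromString(String value) {
            switch (value) {
                case "sum": return SUM;
                case "max": return MAX;
                case "avg": return AVG;
                default:
                    throw new IllegalArgumentException("unknown score type [" + value + "]");
            }
        }
    }

    public static void main(String[] args) {
        // Old call sites passed "sum"; the resolution passes ScoreType.SUM directly.
        System.out.println(ScoreType.fromString("sum"));
    }
}
```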