conflict_resolution (string; lengths 27 to 16k)
<<<<<<< import com.akiban.sql.optimizer.ExpressionRow; ======= import com.akiban.server.aggregation.AggregatorFactory; >>>>>>> import com.akiban.sql.optimizer.ExpressionRow; import com.akiban.server.aggregation.AggregatorFactory;
<<<<<<< ======= import org.opencastproject.mediapackage.track.TrackImpl; import org.opencastproject.mediapackage.track.VideoStreamImpl; import org.opencastproject.security.api.SecurityService; import org.opencastproject.security.urlsigning.exception.UrlSigningException; import org.opencastproject.security.urlsigning.service.UrlSigningService; import org.opencastproject.security.urlsigning.utils.UrlSigningServiceOsgiUtil; >>>>>>> import org.opencastproject.security.api.SecurityService; import org.opencastproject.security.urlsigning.exception.UrlSigningException; import org.opencastproject.security.urlsigning.service.UrlSigningService; import org.opencastproject.security.urlsigning.utils.UrlSigningServiceOsgiUtil; <<<<<<< /** OSGi callback if properties file is present */ @SuppressWarnings("rawtypes") @Override public void updated(Dictionary properties) throws ConfigurationException { } ======= /** OSGi callback if properties file is present */ @SuppressWarnings("rawtypes") @Override public void updated(Dictionary properties) throws ConfigurationException { expireSeconds = UrlSigningServiceOsgiUtil.getUpdatedSigningExpiration(properties, this.getClass().getSimpleName()); signWithClientIP = UrlSigningServiceOsgiUtil.getUpdatedSignWithClientIP(properties, this.getClass().getSimpleName()); } >>>>>>> /** OSGi callback if properties file is present */ @SuppressWarnings("rawtypes") @Override public void updated(Dictionary properties) throws ConfigurationException { expireSeconds = UrlSigningServiceOsgiUtil.getUpdatedSigningExpiration(properties, this.getClass().getSimpleName()); signWithClientIP = UrlSigningServiceOsgiUtil.getUpdatedSignWithClientIP(properties, this.getClass().getSimpleName()); } <<<<<<< for (MediaPackageElement element : previewPublications) { final URI elementUri = element.getURI(); jPreviews.add(j(f("uri", v(elementUri.toString())))); if (!Type.Track.equals(element.getElementType())) continue; ======= for (Publication pub : previewPublications) { String publicationUri; if (urlSigningService.accepts(pub.getURI().toString())) { try { String clientIP = null; if (signWithClientIP) { clientIP = securityService.getUserIP(); } publicationUri = urlSigningService.sign(pub.getURI().toString(), expireSeconds, null, clientIP); } catch (UrlSigningException e) { logger.error("Error while trying to sign the preview urls because: {}", ExceptionUtils.getStackTrace(e)); throw new WebApplicationException(e, SC_INTERNAL_SERVER_ERROR); } } else { publicationUri = pub.getURI().toString(); } jPreviews.add(j(f("uri", v(publicationUri)))); >>>>>>> for (Publication pub : previewPublications) { String publicationUri; if (urlSigningService.accepts(pub.getURI().toString())) { try { String clientIP = null; if (signWithClientIP) { clientIP = securityService.getUserIP(); } publicationUri = urlSigningService.sign(pub.getURI().toString(), expireSeconds, null, clientIP); } catch (UrlSigningException e) { logger.error("Error while trying to sign the preview urls because: {}", ExceptionUtils.getStackTrace(e)); throw new WebApplicationException(e, SC_INTERNAL_SERVER_ERROR); } } else { publicationUri = pub.getURI().toString(); } jPreviews.add(j(f("uri", v(publicationUri)))); <<<<<<< final URI waveformUri = optWaveform.get().getURI(); jTracks.add(jTrack.merge(j(f("waveform", v(waveformUri.toString()))))); ======= String waveformUri; if (urlSigningService.accepts(optWaveform.get().getURI().toString())) { try { waveformUri = urlSigningService.sign(optWaveform.get().getURI().toString(), expireSeconds, null, null); } 
catch (UrlSigningException e) { logger.error("Error while trying to sign the waveform urls because: {}", ExceptionUtils.getStackTrace(e)); throw new WebApplicationException(e, SC_INTERNAL_SERVER_ERROR); } } else { waveformUri = optWaveform.get().getURI().toString(); } jTracks.add(jTrack.merge(j(f("waveform", v(waveformUri))))); >>>>>>> String waveformUri; if (urlSigningService.accepts(optWaveform.get().getURI().toString())) { try { waveformUri = urlSigningService.sign(optWaveform.get().getURI().toString(), expireSeconds, null, null); } catch (UrlSigningException e) { logger.error("Error while trying to sign the waveform urls because: {}", ExceptionUtils.getStackTrace(e)); throw new WebApplicationException(e, SC_INTERNAL_SERVER_ERROR); } } else { waveformUri = optWaveform.get().getURI().toString(); } jTracks.add(jTrack.merge(j(f("waveform", v(waveformUri)))));
<<<<<<< import com.akiban.server.RowData; ======= import com.akiban.server.rowdata.RowData; import com.akiban.server.api.common.NoSuchTableException; import com.akiban.server.api.ddl.UnsupportedDropException; >>>>>>> import com.akiban.server.rowdata.RowData;
<<<<<<< boolean needOperand = false; //ConditionList innerConds = null; ======= boolean needOperand = false, multipleOperands = false; ConditionList innerConds = null; >>>>>>> boolean needOperand = false, multipleOperands = false; //ConditionList innerConds = null;
<<<<<<< import com.akiban.qp.operator.QueryCanceledException; ======= import com.akiban.server.expression.ExpressionRegistry; >>>>>>> import com.akiban.qp.operator.QueryCanceledException; import com.akiban.server.expression.ExpressionRegistry;
<<<<<<< public static final TOverload[] WEEK = { new MWeek() { ======= private static final int DEFAULT_MODE = 0; private static final int WEEKOFYEAR_MODE = 3; private final WeekType weekType; private final DateType dateType; protected static enum WeekType { WEEK() { @Override int getYearWeek(int mode, long[] date, MutableDateTime datetime) { return modes[mode].getWeek(datetime, (int) date[MDatetimes.YEAR_INDEX], (int) date[MDatetimes.MONTH_INDEX], (int) date[MDatetimes.DAY_INDEX]); } }, WEEKOFYEAR() { >>>>>>> <<<<<<< @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { doEvaluate(0, inputs.get(0).getInt64(), context, output); } ======= @Override int getYearWeek(int mode, long[] date, MutableDateTime datetime) { return modes[mode].getWeek(datetime, (int) date[MDatetimes.YEAR_INDEX], (int) date[MDatetimes.MONTH_INDEX], (int) date[MDatetimes.DAY_INDEX]); } }, YEARWEEK() { >>>>>>> <<<<<<< ======= @Override int getYearWeek(int mode, long[] date, MutableDateTime datetime) { return yearModes[mode].getYearWeek(datetime, (int) date[MDatetimes.YEAR_INDEX], (int) date[MDatetimes.MONTH_INDEX], (int) date[MDatetimes.DAY_INDEX]); } }; abstract int getYearWeek(int mode, long[] date, MutableDateTime datetime); }; protected static enum DateType { DATETIME(MDatetimes.DATETIME) { @Override long[] decode(long input, TExecutionContext context) { return MDatetimes.decodeDatetime(input); } }, DATE(MDatetimes.DATE) { @Override long[] decode(long input, TExecutionContext context) { return MDatetimes.decodeDate(input); } }, TIMESTAMP(MDatetimes.TIMESTAMP) { @Override long[] decode(long input, TExecutionContext context) { return MDatetimes.decodeTimestamp(input, context.getCurrentTimezone() ); } }; abstract long[] decode(long input, TExecutionContext context); final TClass typeClass; >>>>>>> <<<<<<< @Override public String overloadName() { return "WEEK"; ======= private DateType(TClass dateType) { this.typeClass = dateType; } } public static final TOverload[] WEEK = { new MWeek(WeekType.WEEK, DateType.DATETIME) { @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { evaluateYearWeek(context, inputs.get(0).getInt64(), DEFAULT_MODE, output); } }, new MWeek(WeekType.WEEK, DateType.DATE) { @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { evaluateYearWeek(context, inputs.get(0).getInt32(), DEFAULT_MODE, output); } }, new MWeek(WeekType.WEEK, DateType.TIMESTAMP) { @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { evaluateYearWeek(context, inputs.get(0).getInt32(), DEFAULT_MODE, output); } }, new MWeek(WeekType.WEEK, DateType.DATETIME) { @Override protected void buildInputSets(TInputSetBuilder builder) { builder.covers(MDatetimes.DATETIME, 0); builder.covers(MNumeric.INT, 1); } @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { int mode = inputs.get(1).getInt32(); evaluateYearWeek(context, inputs.get(0).getInt64(), mode, output); } }, new MWeek(WeekType.WEEK, DateType.DATE) { @Override protected void buildInputSets(TInputSetBuilder builder) { builder.covers(MDatetimes.DATE, 0); builder.covers(MNumeric.INT, 1); } @Override protected void doEvaluate(TExecutionContext context, LazyList<? 
extends PValueSource> inputs, PValueTarget output) { int mode = inputs.get(1).getInt32(); evaluateYearWeek(context, inputs.get(0).getInt32(), mode, output); } }, new MWeek(WeekType.WEEK, DateType.TIMESTAMP) { @Override protected void buildInputSets(TInputSetBuilder builder) { builder.covers(MDatetimes.TIMESTAMP, 0); builder.covers(MNumeric.INT, 1); } @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { int mode = inputs.get(1).getInt32(); evaluateYearWeek(context, inputs.get(0).getInt32(), mode, output); } >>>>>>> <<<<<<< @Override protected void buildInputSets(TInputSetBuilder builder) { builder.covers(MDatetimes.DATETIME, 0); builder.covers(MNumeric.INT, 1); ======= @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { evaluateYearWeek(context, inputs.get(0).getInt32(), WEEKOFYEAR_MODE, output); } }, new MWeek(WeekType.WEEKOFYEAR, DateType.DATE) { @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { evaluateYearWeek(context, inputs.get(0).getInt32(), WEEKOFYEAR_MODE, output); } }, new MWeek(WeekType.WEEKOFYEAR, DateType.TIMESTAMP) { @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { evaluateYearWeek(context, inputs.get(0).getInt32(), WEEKOFYEAR_MODE, output); } >>>>>>> <<<<<<< @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { doEvaluate(inputs.get(1).getInt32(), inputs.get(0).getInt64(), context, output); } @Override public String overloadName() { return "WEEK"; ======= @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { evaluateYearWeek(context, inputs.get(0).getInt32(), DEFAULT_MODE, output); } }, new MWeek(WeekType.YEARWEEK, DateType.DATE) { @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { evaluateYearWeek(context, inputs.get(0).getInt32(), DEFAULT_MODE, output); } }, new MWeek(WeekType.YEARWEEK, DateType.TIMESTAMP) { @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { evaluateYearWeek(context, inputs.get(0).getInt32(), DEFAULT_MODE, output); } }, new MWeek(WeekType.YEARWEEK, DateType.DATETIME) { @Override protected void buildInputSets(TInputSetBuilder builder) { builder.covers(MDatetimes.DATETIME, 0); builder.covers(MNumeric.INT, 1); } @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { int mode = inputs.get(1).getInt32(); evaluateYearWeek(context, inputs.get(0).getInt32(), mode, output); } }, new MWeek(WeekType.YEARWEEK, DateType.DATE) { @Override protected void buildInputSets(TInputSetBuilder builder) { builder.covers(MDatetimes.DATE, 0); builder.covers(MNumeric.INT, 1); } @Override protected void doEvaluate(TExecutionContext context, LazyList<? 
extends PValueSource> inputs, PValueTarget output) { int mode = inputs.get(1).getInt32(); evaluateYearWeek(context, inputs.get(0).getInt32(), mode, output); } }, new MWeek(WeekType.YEARWEEK, DateType.TIMESTAMP) { @Override protected void buildInputSets(TInputSetBuilder builder) { builder.covers(MDatetimes.TIMESTAMP, 0); builder.covers(MNumeric.INT, 1); } @Override protected void doEvaluate(TExecutionContext context, LazyList<? extends PValueSource> inputs, PValueTarget output) { int mode = inputs.get(1).getInt32(); evaluateYearWeek(context, inputs.get(0).getInt32(), mode, output); } >>>>>>> <<<<<<< }}; public static final TOverload WEEKOFYEAR = new MWeek() { ======= }; protected MWeek(WeekType type, DateType dateType) { this.weekType = type; this.dateType = dateType; } >>>>>>> <<<<<<< protected void buildInputSets(TInputSetBuilder builder) { builder.covers(MDatetimes.DATETIME, 0); } protected void doEvaluate(int mode, long input, TExecutionContext context, PValueTarget output) { long[] date = MDatetimes.decodeDatetime(input); if (isZero(date, context, output)) return; if (mode < 0 || mode > 7) { if (context != null) { context.warnClient(new InvalidParameterValueException("MODE out of range [0, 7]: " + mode)); } output.putNull(); } else { MutableDateTime datetime = MDatetimes.toJodaDatetime(date, context.getCurrentTimezone()); int week = modes[mode].getWeek(datetime, (int)date[MDatetimes.YEAR_INDEX], (int)date[MDatetimes.MONTH_INDEX], (int)date[MDatetimes.DAY_INDEX]); output.putInt32(week); } ======= protected void buildInputSets(TInputSetBuilder builder) { builder.covers(this.dateType.typeClass); } protected void evaluateYearWeek(TExecutionContext context, long input, int mode, PValueTarget output) { long[] date = dateType.decode(input, context); if (!isZero(date, context, output) && isModeRange(mode, context, output)) { MutableDateTime datetime = MDatetimes.toJodaDatetime(date, context.getCurrentTimezone()); int week = weekType.getYearWeek(mode, date, datetime); output.putInt32(week); } >>>>>>> <<<<<<< private static boolean isZero(long[] date, TExecutionContext context,PValueTarget output) { boolean isZero = date[MDatetimes.MONTH_INDEX] == 0L || date[MDatetimes.DAY_INDEX] == 0L; if (isZero) { if (context != null) context.warnClient(new ZeroDateTimeException()); output.putNull(); } return isZero; ======= int week = cal.getDayOfYear() - (firstD +1 ); // Sun/Mon if (firstD < 4) { if (week < 0) return yearModes[lowestVal].getYearWeek(cal, yr - 1, 12, 31); else return yr * 100 + week / 7 + 1; >>>>>>> <<<<<<< ======= else { if (week < 0) return yr * 100 + 1; else return yr * 100 + week / 7 + 2; } } private static int getYearMode0257(MutableDateTime cal, int yr, int mo, int da, int firstDay, int lowestVal) { cal.setYear(yr); cal.setMonthOfYear(1); cal.setDayOfMonth(1); int firstD = 1; while (cal.getDayOfWeek() != firstDay) cal.setDayOfMonth(++firstD); cal.setYear(yr); cal.setMonthOfYear(mo); cal.setDayOfMonth(da); int dayOfYear = cal.getDayOfYear(); if (dayOfYear < firstD) return yearModes[lowestVal].getYearWeek(cal, yr - 1, 12, 31); else return yr * 100 + (dayOfYear - firstD) / 7 +1; } protected static boolean isZero(long[] date, TExecutionContext context,PValueTarget output) { boolean isZero = date[MDatetimes.MONTH_INDEX] == 0L || date[MDatetimes.DAY_INDEX] == 0L; if (isZero) { if (context != null) context.warnClient(new ZeroDateTimeException()); output.putNull(); } return isZero; } protected static boolean isModeRange(int mode, TExecutionContext context, PValueTarget output) { boolean 
inRange = mode >= 0 && mode <8; if (!inRange) { if (context != null) { context.warnClient(new InvalidParameterValueException("MODE out of range [0, 7]: " + mode)); } output.putNull(); } return inRange; } >>>>>>>
<<<<<<< ======= import com.akiban.ais.model.FullTextIndex; >>>>>>> import com.akiban.ais.model.FullTextIndex; <<<<<<< synchronized(POPULATE_TREE_LOCK) { ex.remove(); } FullTextIndexInfo index = getIndex(session, name); index.deletePath(); synchronized (indexes) { indexes.remove(name); } ======= ex.fetchAndRemove(); >>>>>>> synchronized(POPULATE_TREE_LOCK) { ex.remove(); }
<<<<<<< import com.akiban.server.expression.std.AbstractTwoArgExpressionEvaluation; import com.akiban.server.expression.std.FieldExpression; import com.akiban.server.expression.std.RankExpression; import com.akiban.sql.optimizer.explain.Explainer; ======= import com.akiban.server.api.dml.ColumnSelector; import com.akiban.server.api.dml.IndexRowPrefixSelector; >>>>>>> import com.akiban.server.expression.std.AbstractTwoArgExpressionEvaluation; import com.akiban.server.expression.std.FieldExpression; import com.akiban.server.expression.std.RankExpression; import com.akiban.sql.optimizer.explain.Explainer; import com.akiban.server.api.dml.ColumnSelector; import com.akiban.server.api.dml.IndexRowPrefixSelector;
<<<<<<< protected static void checkArgument(boolean assertion) throws IllegalArgumentException { if (!assertion) { throw new IllegalArgumentException(); } } public boolean cursorAbilitiesInclude(CursorAbility ability) { return false; } ======= @Override >>>>>>> protected static void checkArgument(boolean assertion) throws IllegalArgumentException { if (!assertion) { throw new IllegalArgumentException(); } } @Override
<<<<<<< IndexUsage index = null; try { tracer.beginEvent("sql: optimize: pickbestindex"); index = pickBestIndex(squery); } finally { tracer.endEvent(); } if (squery.getSortColumns() != null) ======= IndexUsage index = pickBestIndex(squery); if ((squery.getSortColumns() != null) && !((index != null) && index.isSorting())) >>>>>>> IndexUsage index = null; try { tracer.beginEvent("sql: optimize: pickbestindex"); index = pickBestIndex(squery); } finally { tracer.endEvent(); } if ((squery.getSortColumns() != null) && !((index != null) && index.isSorting())) <<<<<<< tableType, addAncestors); } ======= tableType, addAncestors, true); >>>>>>> tableType, addAncestors, true); }
<<<<<<< import uk.co.datumedge.hamcrest.json.SameJSONAs; import java.io.IOException; ======= >>>>>>> import java.io.IOException;
<<<<<<< import com.akiban.util.Strings; ======= import com.akiban.sql.optimizer.plan.Sort.OrderByExpression; >>>>>>> import com.akiban.util.Strings; import com.akiban.sql.optimizer.plan.Sort.OrderByExpression;
<<<<<<< import com.akiban.server.RowData; import com.akiban.server.RowDef; ======= import com.akiban.server.InvalidOperationException; import com.akiban.server.rowdata.RowData; import com.akiban.server.rowdata.RowDef; >>>>>>> import com.akiban.server.rowdata.RowData; import com.akiban.server.rowdata.RowDef;
<<<<<<< ======= void share(); boolean isShared(); void release(); void afterRelease(); >>>>>>> void afterRelease();
<<<<<<< sb.append(comparison); // TODO: Need nicer presentation. if (collator != null) sb.append("/").append(collator); ======= sb.append(name()); } @Override public String name () { return comparison.name(); >>>>>>> sb.append(name()); // TODO: Need nicer presentation. if (collator != null) sb.append("/").append(collator); } @Override public String name () { return comparison.name(); <<<<<<< public CompareExpression(Expression lhs, Comparison comparison, Expression rhs, AkCollator collator) { this(AkType.BOOL, lhs, comparison, rhs, collator); } ======= /* * Old version public CompareExpression(Expression lhs, Comparison comparison, Expression rhs) { super(AkType.BOOL, lhs, rhs); this.comparison = comparison; AkType type = childrenType(children()); assert type != null; this.op = readOnlyCompareOps.get(type); if (this.op == null) throw new AkibanInternalException("couldn't find internal comparator for " + type); //this(AkType.BOOL, lhs, comparison, rhs); } */ //copied from trunk >>>>>>> public CompareExpression(Expression lhs, Comparison comparison, Expression rhs, AkCollator collator) { this(AkType.BOOL, lhs, comparison, rhs, collator); } /* * Old version public CompareExpression(Expression lhs, Comparison comparison, Expression rhs) { super(AkType.BOOL, lhs, rhs); this.comparison = comparison; AkType type = childrenType(children()); assert type != null; this.op = readOnlyCompareOps.get(type); if (this.op == null) throw new AkibanInternalException("couldn't find internal comparator for " + type); //this(AkType.BOOL, lhs, comparison, rhs); } */ //copied from trunk
<<<<<<< import com.akiban.server.types3.TAttributeValues; import com.akiban.server.types3.TAttributesDeclaration; import com.akiban.server.types3.TClass; import com.akiban.server.types3.TExecutionContext; import com.akiban.server.types3.TFactory; import com.akiban.server.types3.TInstance; import com.akiban.server.types3.TParser; import com.akiban.server.types3.TParsers; ======= import com.akiban.server.types3.*; >>>>>>> import com.akiban.server.types3.TAttributeValues; import com.akiban.server.types3.TAttributesDeclaration; import com.akiban.server.types3.TClass; import com.akiban.server.types3.TExecutionContext; import com.akiban.server.types3.TFactory; import com.akiban.server.types3.TInstance; import com.akiban.server.types3.TParser; import com.akiban.server.types3.TParsers; <<<<<<< public static final TClass DOUBLE = new MApproximateNumber(TypeId.DOUBLE_ID, PUnderlying.DOUBLE, TParsers.DOUBLE); public static final TClass DOUBLE_UNSIGNED = new MApproximateNumber(TypeId.DOUBLE_UNSIGNED_ID, PUnderlying.DOUBLE, TParsers.DOUBLE); public static final TClass FLOAT = new MApproximateNumber(TypeId.REAL_ID, PUnderlying.FLOAT, TParsers.FLOAT); public static final TClass FLOAT_UNSIGNED = new MApproximateNumber(TypeId.REAL_UNSIGNED_ID, PUnderlying.FLOAT, TParsers.FLOAT); ======= public static final TClass DOUBLE = new MApproximateNumber("double", TypeId.DOUBLE_ID, PUnderlying.DOUBLE); public static final TClass DOUBLE_UNSIGNED = new MApproximateNumber("double unsigned", TypeId.DOUBLE_UNSIGNED_ID, PUnderlying.DOUBLE); public static final TClass FLOAT = new MApproximateNumber("float", TypeId.REAL_ID, PUnderlying.FLOAT); public static final TClass FLOAT_UNSIGNED = new MApproximateNumber("float unsigned", TypeId.REAL_UNSIGNED_ID, PUnderlying.FLOAT); >>>>>>> public static final TClass DOUBLE = new MApproximateNumber("double", TypeId.DOUBLE_ID, PUnderlying.DOUBLE, TParsers.DOUBLE); public static final TClass DOUBLE_UNSIGNED = new MApproximateNumber("double unsigned", TypeId.DOUBLE_UNSIGNED_ID, PUnderlying.DOUBLE, TParsers.DOUBLE); public static final TClass FLOAT = new MApproximateNumber("float", TypeId.REAL_ID, PUnderlying.FLOAT, TParsers.FLOAT); public static final TClass FLOAT_UNSIGNED = new MApproximateNumber("float unsigned", TypeId.REAL_UNSIGNED_ID, PUnderlying.FLOAT, TParsers.FLOAT); <<<<<<< private MApproximateNumber(TypeId typeId, PUnderlying underlying, TParser parser) ======= private MApproximateNumber(String name, TypeId typeId, PUnderlying underlying) >>>>>>> private MApproximateNumber(String name, TypeId typeId, PUnderlying underlying, TParser parser)
<<<<<<< import com.akiban.server.aggregation.AggregatorRegistry; import com.akiban.server.expression.ExpressionFactory; ======= import com.akiban.server.expression.ExpressionRegistry; >>>>>>> import com.akiban.server.aggregation.AggregatorRegistry; import com.akiban.server.expression.ExpressionRegistry; <<<<<<< public ExpressionFactory expressionFactory(); public AggregatorRegistry aggregatorRegistry(); ======= public ExpressionRegistry expressionFactory(); >>>>>>> public ExpressionRegistry expressionFactory(); public AggregatorRegistry aggregatorRegistry();
<<<<<<< import com.akiban.server.aggregation.AggregatorRegistry; import com.akiban.server.expression.ExpressionFactory; ======= import com.akiban.server.expression.ExpressionRegistry; >>>>>>> import com.akiban.server.aggregation.AggregatorRegistry; import com.akiban.server.expression.ExpressionRegistry; <<<<<<< ExpressionFactory expressionFactory, AggregatorRegistry aggregatorRegistry ======= ExpressionRegistry expressionRegistry >>>>>>> ExpressionRegistry expressionRegistry, AggregatorRegistry aggregatorRegistry <<<<<<< this.expressionFactory = expressionFactory; this.aggregatorRegistry = aggregatorRegistry; ======= this.expressionRegistry = expressionRegistry; >>>>>>> this.expressionRegistry = expressionRegistry; this.aggregatorRegistry = aggregatorRegistry; <<<<<<< private final ExpressionFactory expressionFactory; private final AggregatorRegistry aggregatorRegistry; ======= private final ExpressionRegistry expressionRegistry; >>>>>>> private final ExpressionRegistry expressionRegistry; private final AggregatorRegistry aggregatorRegistry;
<<<<<<< import com.akiban.server.types3.TParsers; import com.akiban.server.types3.TParser; ======= import com.akiban.server.types3.TClassFormatter; >>>>>>> import com.akiban.server.types3.TParsers; import com.akiban.server.types3.TParser; import com.akiban.server.types3.TClassFormatter; <<<<<<< return new NoAttrTClass(AkBundle.INSTANCE.id(), name, internalVersion, serialVersion, size, underlying, parser, TypeId.INTEGER_ID); ======= return new NoAttrTClass(AkBundle.INSTANCE.id(), name, formatter, internalVersion, serialVersion, size, underlying, TypeId.INTEGER_ID); >>>>>>> return new NoAttrTClass(AkBundle.INSTANCE.id(), name, formatter, internalVersion, serialVersion, size, underlying, parser, TypeId.INTEGER_ID);
<<<<<<< import com.akiban.server.RowData; ======= import com.akiban.server.rowdata.RowData; import com.akiban.server.api.common.NoSuchTableException; >>>>>>> import com.akiban.server.rowdata.RowData;
<<<<<<< private PooledConverter getPooledConverter(RowOutput output, Set<Integer> columns) throws NoSuchTableException { ======= private PooledConverter getPooledConverter(RowOutput output, Set<ColumnId> columns) throws NoSuchTableException { >>>>>>> private PooledConverter getPooledConverter(RowOutput output, Set<Integer> columns) throws NoSuchTableException { <<<<<<< Set<Integer> scanColumns = scanData.scanAll() ? null : scanData .getScanColumns(); final PooledConverter converter = getPooledConverter(output, scanColumns); ======= Set<ColumnId> scanColumns = scanData.scanAll() ? null : scanData.getScanColumns(); final PooledConverter converter = getPooledConverter(output, scanColumns); >>>>>>> Set<Integer> scanColumns = scanData.scanAll() ? null : scanData.getScanColumns(); final PooledConverter converter = getPooledConverter(output, scanColumns);
<<<<<<< import com.akiban.server.service.ServiceManager; import com.akiban.server.service.ServiceManagerImpl; ======= import com.akiban.server.service.ServiceStartupException; import com.akiban.server.service.config.ConfigurationService; import com.akiban.server.service.dxl.DXLService; import com.akiban.server.service.instrumentation.InstrumentationService; >>>>>>> import com.akiban.server.service.config.ConfigurationService; import com.akiban.server.service.dxl.DXLService; import com.akiban.server.service.instrumentation.InstrumentationService;
<<<<<<< import com.akiban.server.RowData; import com.akiban.server.RowDef; ======= import com.akiban.server.rowdata.RowData; import com.akiban.server.rowdata.RowDef; import com.akiban.server.api.GenericInvalidOperationException; >>>>>>> import com.akiban.server.rowdata.RowData; import com.akiban.server.rowdata.RowDef;
<<<<<<< // The dependence on field positions and fieldCount is a problem for spatial indexes if (index.isSpatial()) { throw new UnsupportedOperationException(index.toString()); } ======= // field and byte indexing is as if the pKey and pValue were one contiguous array of bytes. But we switch // from pKey to pValue as needed to avoid having to actually copy the bytes into such an array. >>>>>>> // The dependence on field positions and fieldCount is a problem for spatial indexes if (index.isSpatial()) { throw new UnsupportedOperationException(index.toString()); } // field and byte indexing is as if the pKey and pValue were one contiguous array of bytes. But we switch // from pKey to pValue as needed to avoid having to actually copy the bytes into such an array. <<<<<<< public void append(Column column, ValueSource source) { // There is no hard requirement that the index is a group index. But while we're adding support for // spatial, we just want to be precise about what kind of index is in use. assert index.isGroupIndex(); keyAppender.append(source, column); } ======= @Override >>>>>>> @Override <<<<<<< this.index = index; key.clear(); this.keyAppender = PersistitKeyAppender.create(key); this.value = null; if (index.isSpatial()) { this.spatialHandler = new SpatialHandler(); this.nIndexFields = index.getAllColumns().size() - index.getKeyColumns().size() + 1; } else { this.spatialHandler = null; this.nIndexFields = index.getAllColumns().size(); } ======= reset(index, key, null, true); >>>>>>> reset(index, key, null, true); <<<<<<< if (index.isSpatial()) { throw new UnsupportedOperationException("Spatial indexes don't implement types3 yet"); } source.attach(keyAppender.key(), position, type); ======= if (position < pKeyFields) { source.attach(pKey, position, type); } else { source.attach(pValue, position - pKeyFields, type); } >>>>>>> if (index.isSpatial()) { throw new UnsupportedOperationException("Spatial indexes don't implement types3 yet"); } if (position < pKeyFields) { source.attach(pKey, position, type); } else { source.attach(pValue, position - pKeyFields, type); } <<<<<<< for (int i = 0; i < indexToHKey.getLength(); i++) { if (indexToHKey.isOrdinal(i)) { ======= for (int i = 0; i < indexToHKey.getLength(); ++i) { if (indexToHKey.isOrdinal(i)) { >>>>>>> for (int i = 0; i < indexToHKey.getLength(); i++) { if (indexToHKey.isOrdinal(i)) { <<<<<<< } else { int depth = indexToHKey.getIndexRowPosition(i); if (index.isSpatial()) { // A spatial index has a single key column (the z-value), representing the declared key columns. depth = depth - index.getKeyColumns().size() + 1; } if (depth < 0 || depth > indexRowKey.getDepth()) { throw new IllegalStateException( "IndexKey too shallow - requires depth=" + depth + ": " + indexRowKey); ======= } else { int indexField = indexToHKey.getIndexRowPosition(i); Key keySource; if (indexField < pKeyFields) { keySource = pKey; } else { keySource = pValue; indexField -= pKeyFields; >>>>>>> } else { int indexField = indexToHKey.getIndexRowPosition(i); if (index.isSpatial()) { // A spatial index has a single key column (the z-value), representing the declared key columns. 
indexField = indexField - index.getKeyColumns().size() + 1; } Key keySource; if (indexField < pKeyFields) { keySource = pKey; } else { keySource = pValue; indexField -= pKeyFields; <<<<<<< protected Index index; protected int nIndexFields; private PersistitKeyAppender keyAppender; ======= // The notation involving "keys" and "values" is tricky because this code deals with both the index view and // the persistit view, and these don't match up exactly. // // The index view of keys and values: An application-defined index has a key comprising // one or more columns from one table (table index) or multiple tables (group index). An index row has fields // corresponding to these columns, and additional fields corresponding to undeclared hkey columns. // Index.getKeyColumns refers to the declared columns, and Index.getAllColumns refers to the declared and // undeclared columns. // // The persistit view: A record managed by Persistit has a Key and a Value. // // The mapping: For a non-unique index, all of an index's columns (declared and undeclared) are stored in // the Persistit Key. For a unique index, the declared columns are stored in the Persistit Key while the // remaining columns are stored in the Persistit Value. Group indexes are never unique, so all columns // are in the Persistit Key and the Persistit Value is used to store the "table bitmap". // // Terminology: To try and avoid confusion, the terms pKey and pValue will be used when referring to Persistit // Keys and Values. The term key will refer to an index key. // // So why is pValueAppender a PersistitKeyAppender? Because it is convenient to treat index fields // in the style of Persistit Key fields. That permits, for example, byte[] comparisons to determine how values // that happen to reside in a Persistit Value (i.e., an undeclared field of an index row for a unique index). // So as an index row is being created, we deal entirely with Persisitit Keys, via pKeyAppender or pValueAppender. // Only when it is time to write the row are the bytes managed by the pValueAppender written as a single // Persistit Value. protected final PersistitAdapter adapter; private Index index; private Key pKey; private Key pValue; private PersistitKeyAppender pKeyAppender; private PersistitKeyAppender pValueAppender; private int pKeyFields; >>>>>>> // The notation involving "keys" and "values" is tricky because this code deals with both the index view and // the persistit view, and these don't match up exactly. // // The index view of keys and values: An application-defined index has a key comprising // one or more columns from one table (table index) or multiple tables (group index). An index row has fields // corresponding to these columns, and additional fields corresponding to undeclared hkey columns. // Index.getKeyColumns refers to the declared columns, and Index.getAllColumns refers to the declared and // undeclared columns. // // The persistit view: A record managed by Persistit has a Key and a Value. // // The mapping: For a non-unique index, all of an index's columns (declared and undeclared) are stored in // the Persistit Key. For a unique index, the declared columns are stored in the Persistit Key while the // remaining columns are stored in the Persistit Value. Group indexes are never unique, so all columns // are in the Persistit Key and the Persistit Value is used to store the "table bitmap". // // Terminology: To try and avoid confusion, the terms pKey and pValue will be used when referring to Persistit // Keys and Values. 
The term key will refer to an index key. // // So why is pValueAppender a PersistitKeyAppender? Because it is convenient to treat index fields // in the style of Persistit Key fields. That permits, for example, byte[] comparisons to determine how values // that happen to reside in a Persistit Value (i.e., an undeclared field of an index row for a unique index). // So as an index row is being created, we deal entirely with Persisitit Keys, via pKeyAppender or pValueAppender. // Only when it is time to write the row are the bytes managed by the pValueAppender written as a single // Persistit Value. protected final PersistitAdapter adapter; protected Index index; protected int nIndexFields; private Key pKey; private Key pValue; private PersistitKeyAppender pKeyAppender; private PersistitKeyAppender pValueAppender; private int pKeyFields;
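Note: the long comment in the row above explains how a unique index's undeclared columns are stored in the Persistit Value while callers keep addressing index fields as one contiguous array. A minimal sketch of that routing, under the stated assumptions; the class and method names here are invented for illustration, and only the position < pKeyFields test itself comes from the row's code:

    import com.persistit.Key;

    // Hypothetical helper illustrating the pKey/pValue field routing described above.
    class IndexFieldRouting {
        private final Key pKey;        // declared key columns (plus everything else for non-unique indexes)
        private final Key pValue;      // undeclared columns of a unique index, buffered Key-style,
                                       // written out later as the single Persistit Value
        private final int pKeyFields;  // how many logical index fields live in pKey

        IndexFieldRouting(Key pKey, Key pValue, int pKeyFields) {
            this.pKey = pKey;
            this.pValue = pValue;
            this.pKeyFields = pKeyFields;
        }

        // Logical field i of the index row is either field i of pKey or
        // field (i - pKeyFields) of pValue; no bytes are ever copied.
        Key sourceFor(int position) {
            return position < pKeyFields ? pKey : pValue;
        }

        int localPosition(int position) {
            return position < pKeyFields ? position : position - pKeyFields;
        }
    }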
<<<<<<< private void bind(RowData rowData) { if (lastSpatialField > firstSpatialField) { // Point coordinates stored in two columns assert dimensions == 2 : dimensions; double coord = Double.NaN; double x = Double.NaN; double y = Double.NaN; for (int d = 0; d < dimensions; d++) { rowDataSource.bind(fieldDefs[d], rowData); RowDataValueSource rowDataValueSource = (RowDataValueSource) rowDataSource; TClass tclass = tinstances[d].typeClass(); if (tclass == MNumeric.DECIMAL) { BigDecimalWrapper wrapper = TBigDecimal.getWrapper(rowDataValueSource, tinstances[d]); coord = wrapper.asBigDecimal().doubleValue(); } else if (tclass == MNumeric.BIGINT) { coord = rowDataValueSource.getInt64(); } else if (tclass == MNumeric.INT) { coord = rowDataValueSource.getInt32(); } else { assert false : fieldDefs[d].column(); } if (d == 0) { x = coord; } else { y = coord; } coords[d] = coord; } spatialObject = new Point(x, y); } else { // Spatial object encoded in blob rowDataSource.bind(fieldDefs[0], rowData); RowDataValueSource rowDataValueSource = (RowDataValueSource) rowDataSource; TClass tclass = tinstances[0].typeClass(); assert tclass == MBinary.VARBINARY : tclass; byte[] spatialObjectBytes = rowDataValueSource.getBytes(); try { spatialObject = Spatial.deserialize(space, spatialObjectBytes); } catch (ParseException e) { throw new InvalidSpatialObjectException(); } } } ======= >>>>>>>
<<<<<<< protected void dmlPostMetaToPreFinal(OperatorCreator dmlCreator, List<Row> finalGroupRows, boolean isDMLPassing) { dmlPostMetaToPreFinal(dmlCreator, finalGroupRows, isDMLPassing, null, null, null, null); } /** DML transaction starting after DDL METADATA and committing prior DDL FINAL. */ protected void dmlPostMetaToPreFinal(OperatorCreator dmlCreator, List<Row> finalGroupRows, boolean isDMLPassing, List<DataTypeDescriptor> descriptors, List<String> columnNames, OnlineCreateTableAsMT.TestSession server, String sqlQuery){ dmlPostMetaToPreFinal(dmlCreator, finalGroupRows, isDMLPassing, descriptors, columnNames, server, sqlQuery, false); } protected void dmlPostMetaToPreFinal(OperatorCreator dmlCreator, List<Row> finalGroupRows, boolean isDMLPassing, List<DataTypeDescriptor> descriptors, List<String> columnNames, OnlineCreateTableAsMT.TestSession server, String sqlQuery, boolean ignoreNewPK){ ======= protected void dmlPostMetaToPreFinal(OperatorCreator dmlCreator, List<Row> finalGroupRows, boolean isDMLPassing, boolean isDDLPassing) { // In the interest of determinism, DDL transform runs completely *before* DML starts. // The opposite ordering would fail the DDL directly instead (e.g. NotNullViolation vs ConcurrentViolation). >>>>>>> protected void dmlPostMetaToPreFinal(OperatorCreator dmlCreator, List<Row> finalGroupRows, boolean isDMLPassing, boolean isDDLPassing) { dmlPostMetaToPreFinal(dmlCreator, finalGroupRows, isDMLPassing, isDDLPassing, null, null, null, null); } /** DML transaction starting after DDL METADATA and committing prior DDL FINAL. */ protected void dmlPostMetaToPreFinal(OperatorCreator dmlCreator, List<Row> finalGroupRows, boolean isDMLPassing, boolean isDDLPassing, List<DataTypeDescriptor> descriptors, List<String> columnNames, OnlineCreateTableAsMT.TestSession server, String sqlQuery){ dmlPostMetaToPreFinal(dmlCreator, finalGroupRows, isDMLPassing, isDDLPassing, descriptors, columnNames, server, sqlQuery, false); } protected void dmlPostMetaToPreFinal(OperatorCreator dmlCreator, List<Row> finalGroupRows, boolean isDMLPassing, boolean isDDLPassing, List<DataTypeDescriptor> descriptors, List<String> columnNames, OnlineCreateTableAsMT.TestSession server, String sqlQuery, boolean ignoreNewPK){ // In the interest of determinism, DDL transform runs completely *before* DML starts. // The opposite ordering would fail the DDL directly instead (e.g. NotNullViolation vs ConcurrentViolation). <<<<<<< final List<MonitoredThread> threads; if(isDMLPassing) { threads = builder.build(this, descriptors, columnNames, server, sqlQuery); ThreadHelper.runAndCheck(threads); } else { threads = builder.build(this); UncaughtHandler handler = ThreadHelper.startAndJoin(threads); assertEquals("ddl failure", null, handler.thrown.get(threads.get(0))); } ======= final List<MonitoredThread> threads = builder.build(this); ThreadHelper.startAndJoin(threads); >>>>>>> final List<MonitoredThread> threads; if(isDMLPassing) { threads = builder.build(this, descriptors, columnNames, server, sqlQuery); ThreadHelper.startAndJoin(threads); } else { threads = builder.build(this); UncaughtHandler handler = ThreadHelper.startAndJoin(threads); assertEquals("ddl failure", null, handler.thrown.get(threads.get(0))); } <<<<<<< .build(this, descriptors, columnNames, server, sqlQuery); ======= .rollbackRetry(!isDMLFailing) .build(this); >>>>>>> .rollbackRetry(!isDMLFailing) .build(this, descriptors, columnNames, server, sqlQuery);
<<<<<<< import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonNode; ======= import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.JsonParseException; import org.eclipse.jetty.client.ContentExchange; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.HttpExchange; import org.junit.After; >>>>>>> import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonNode; import org.eclipse.jetty.client.ContentExchange; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.HttpExchange; import org.junit.After;
<<<<<<< import com.akiban.server.aggregation.DummyAggregatorRegistry; import com.akiban.server.expression.ExpressionFactory; ======= import com.akiban.server.expression.ExpressionRegistry; >>>>>>> import com.akiban.server.aggregation.DummyAggregatorRegistry; import com.akiban.server.expression.ExpressionRegistry; <<<<<<< super(ais, ExpressionFactory.EMPTY, new DummyAggregatorRegistry(), rules); ======= super(ais, ExpressionRegistry.EMPTY, rules); >>>>>>> super(ais, ExpressionRegistry.EMPTY, new DummyAggregatorRegistry(), rules);
<<<<<<< import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.JsonNode; import org.codehaus.jackson.JsonFactory; ======= import org.codehaus.jackson.JsonGenerator; import org.codehaus.jackson.JsonNode; >>>>>>> import org.codehaus.jackson.JsonFactory; <<<<<<< JsonParser json = oldJsonFactory.createJsonParser(ajdax); ======= JsonParser json = jsonParser(jonquil); >>>>>>> JsonParser json = oldJsonFactory.createJsonParser(jonquil);
<<<<<<< DataTypeDescriptor sqlType = expression.getSQLtype(); targetInstance.setNullable(sqlType == null || sqlType.isNullable()); ExpressionNode result ======= targetInstance.setNullable(expression.getSQLtype().isNullable()); CastExpression castExpression >>>>>>> DataTypeDescriptor sqlType = expression.getSQLtype(); targetInstance.setNullable(sqlType == null || sqlType.isNullable()); targetInstance.setNullable(expression.getSQLtype().isNullable()); CastExpression castExpression
<<<<<<< CostEstimator costEstimator) { super(ais, functionsRegistry, costEstimator, DEFAULT_RULES); ======= IndexEstimator indexEstimator) { super(ais, functionsRegistry, indexEstimator, DEFAULT_RULES, properties); >>>>>>> CostEstimator costEstimator) { super(ais, functionsRegistry, costEstimator, DEFAULT_RULES, properties);
<<<<<<< public PostgresStatement generateInitial(PostgresServerSession server, StatementNode stmt, List<ParameterNode> params, int[] paramTypes) { ======= public PostgresStatement generate(PostgresServerSession server, String sql, StatementNode stmt, List<ParameterNode> params, int[] paramTypes) { >>>>>>> public PostgresStatement generateInitial(PostgresServerSession server, String sql, StatementNode stmt, List<ParameterNode> params, int[] paramTypes) {
<<<<<<< Transaction transaction = ServiceManagerImpl.get().getTreeService().getTransaction(session); int retriesLeft = SCAN_RETRY_COUNT; while (true) { output.mark(); try { transaction.begin(); try { boolean ret = scanner.doScan(cursor, cursorId, output); transaction.commit(); return ret; } catch (RollbackException e) { logger.trace("PersistIt error; retrying", e); scanner.scanHooks.retryHook(); output.rewind(); if (--retriesLeft <= 0) { throw new GenericInvalidOperationException(e); } try { cursor = reopen(session, cursorId, cursor.getScanRequest(), false); } catch (InvalidOperationException e1) { throw new GenericInvalidOperationException(e1); } } catch (PersistitException e) { throw new GenericInvalidOperationException(e); } finally { transaction.end(); } } catch (PersistitException e) { throw new GenericInvalidOperationException(e); } } ======= if (CursorState.CONCURRENT_MODIFICATION.equals(cursor.getState())) { throw new ConcurrentScanAndUpdateException("for cursor " + cursorId); } if (cursor.isFinished()) { throw new CursorIsFinishedException(cursorId); } return scanner.doScan(cursor, cursorId, output); >>>>>>> if (CursorState.CONCURRENT_MODIFICATION.equals(cursor.getState())) { throw new ConcurrentScanAndUpdateException("for cursor " + cursorId); } if (cursor.isFinished()) { throw new CursorIsFinishedException(cursorId); } Transaction transaction = ServiceManagerImpl.get().getTreeService().getTransaction(session); int retriesLeft = SCAN_RETRY_COUNT; while (true) { output.mark(); try { transaction.begin(); try { boolean ret = scanner.doScan(cursor, cursorId, output); transaction.commit(); return ret; } catch (RollbackException e) { logger.trace("PersistIt error; retrying", e); scanner.scanHooks.retryHook(); output.rewind(); if (--retriesLeft <= 0) { throw new GenericInvalidOperationException(e); } try { cursor = reopen(session, cursorId, cursor.getScanRequest(), false); } catch (InvalidOperationException e1) { throw new GenericInvalidOperationException(e1); } } catch (PersistitException e) { throw new GenericInvalidOperationException(e); } finally { transaction.end(); } } catch (PersistitException e) { throw new GenericInvalidOperationException(e); } } <<<<<<< BufferFullException, RollbackException { ======= BufferFullException { >>>>>>> BufferFullException, RollbackException {
<<<<<<< ======= if (rowDef.isGroupTable()) { ts.setRowCount(2); ts.setAutoIncrementValue(-1); } else { ts.setAutoIncrementValue(status.getAutoIncrement()); ts.setRowCount(status.getRowCount()); } // TODO - get correct values ts.setMeanRecordLength(100); ts.setBlockSize(8192); >>>>>>>
<<<<<<< RoutineLoader routineLoader, TransactionService txnService) { reqs = new ServerServiceRequirements(dxlService, instrumentation, ======= RoutineLoader routineLoader) { reqs = new ServerServiceRequirements(akServer, dxlService, instrumentation, >>>>>>> RoutineLoader routineLoader, TransactionService txnService) { reqs = new ServerServiceRequirements(akServer, dxlService, instrumentation,
<<<<<<< @Override public AkCollator collatorAt(int index) { // TODO - probably incorrect return null; } ======= @Override public TInstance typeInstanceAt(int index) { return tInstances[index]; } >>>>>>> @Override public AkCollator collatorAt(int index) { // TODO - probably incorrect return null; } @Override public TInstance typeInstanceAt(int index) { return tInstances[index]; }
<<<<<<< ======= import com.foundationdb.sql.parser.IndexDefinitionNode; import com.foundationdb.sql.pg.PostgresQueryContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; >>>>>>> import com.foundationdb.sql.parser.IndexDefinitionNode; import com.foundationdb.sql.pg.PostgresQueryContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory;
<<<<<<< //-------------------------------------------------------------------------- /** * Date/time types: * DATE - DATE, TIME - TIME, YEAR - YEAR , DATETIME - DATETIME, => result is an INTERVAL * DATE + INTERVAL => DATE, TIME + INTERVAL => TIME, ....etc * INTERVAL + DATE => DATE, INTERVAL + TIME => TIME, ....etc * DATE - INTERVAL => DATE, TIME - INTERVAL => TIME, ....etc * INTERVAL + INTERVAL => INTERVAL * INTERVAL - INTERVAL => INTERVAL * INTERVAL * n (of anytypes :double, long ,...etc) => INTERVAL * INTERVAL / n = INTERVAL * * Regular types: * Anything [+/*-] DECIMAL => DECIMAL * Anything (except DECIMAL) [+/*-] DOUBLE = > DOUBLE * Anything (except DECIMAL and DOUBLE) [+/*-] U_BIGINT => U_BIGINT * LONG [+/*-] LONG => LONG * * Anything else is unsupported * * * @param leftT * @param rightT * @param op * @return topType * @author VyNguyen * * * Why/how this works: * 1) find leftT and rightT's key * 2) find product of the two keys * 3) find sum of the two keys * * if sum is neg then both are not supported since .get() only returns -1 if the types aren't in the map * else * if product is zero then at least one of the two is an INTERVAL (key of INTERVAL is zero) * check sum : * if sum is even (0, 2, 4...etc..) then the other operand is either an interval or date/time * check op : if it's anything other than + or - => throw exception * if sum is odd (1, 3, 5...etc..) then the other operand is a regular numeric type * check of : if it's anything other than * or / => throw exception * if product is positive, then both are supported types * check product: * if it's odd, then the two operands are both regular numeric (since the product of two numbers can only be odd if both the numbers are odd) * if it's even, at least one of the two is date/time * the only legal case is when both operands are date, or time, and the operator is minus * else => throw exception * if product is negative, then one is supported and one is NOT supported * check sum: * if sum is odd => one of the two operand is a date/time and the other is unsupported (since unsupported type get a key of -1, and date/time's key is an even. 
even -1 = odd) * in which case, throw an exception * else if sum is even => unsupported and a numeric => return the numeric type */ private static AkType getTopType (AkType leftT, AkType rightT, ArithOp op) ======= @Override protected boolean nullIsContaminating() { return true; } protected static AkType getTopType (AkType leftT, AkType rightT) >>>>>>> /** * Date/time types: * DATE - DATE, TIME - TIME, YEAR - YEAR , DATETIME - DATETIME, => result is an INTERVAL * DATE + INTERVAL => DATE, TIME + INTERVAL => TIME, ....etc * INTERVAL + DATE => DATE, INTERVAL + TIME => TIME, ....etc * DATE - INTERVAL => DATE, TIME - INTERVAL => TIME, ....etc * INTERVAL + INTERVAL => INTERVAL * INTERVAL - INTERVAL => INTERVAL * INTERVAL * n (of anytypes :double, long ,...etc) => INTERVAL * INTERVAL / n = INTERVAL * * Regular types: * Anything [+/*-] DECIMAL => DECIMAL * Anything (except DECIMAL) [+/*-] DOUBLE = > DOUBLE * Anything (except DECIMAL and DOUBLE) [+/*-] U_BIGINT => U_BIGINT * LONG [+/*-] LONG => LONG * * Anything else is unsupported * * * @param leftT * @param rightT * @param op * @return topType * @author VyNguyen * * * Why/how this works: * 1) find leftT and rightT's key * 2) find product of the two keys * 3) find sum of the two keys * * if sum is neg then both are not supported since .get() only returns -1 if the types aren't in the map * else * if product is zero then at least one of the two is an INTERVAL (key of INTERVAL is zero) * check sum : * if sum is even (0, 2, 4...etc..) then the other operand is either an interval or date/time * check op : if it's anything other than + or - => throw exception * if sum is odd (1, 3, 5...etc..) then the other operand is a regular numeric type * check of : if it's anything other than * or / => throw exception * if product is positive, then both are supported types * check product: * if it's odd, then the two operands are both regular numeric (since the product of two numbers can only be odd if both the numbers are odd) * if it's even, at least one of the two is date/time * the only legal case is when both operands are date, or time, and the operator is minus * else => throw exception * if product is negative, then one is supported and one is NOT supported * check sum: * if sum is odd => one of the two operand is a date/time and the other is unsupported (since unsupported type get a key of -1, and date/time's key is an even. even -1 = odd) * in which case, throw an exception * else if sum is even => unsupported and a numeric => return the numeric type */ protected static AkType getTopType (AkType leftT, AkType rightT, ArithOp op)
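Note: the comment in the row above encodes type dispatch in the parity of small integer keys. A worked sketch of that product/sum reasoning, mirroring the comment's own rules; the concrete key values are assumptions for illustration only (INTERVAL = 0, date/time types = even, regular numerics = odd, anything else = -1), not the real table in MySQL/Akiban code:

    import java.util.Map;

    class TopTypeParitySketch {
        // Illustrative key table; the real map lives in the row's class.
        static final Map<String, Integer> KEYS = Map.of(
                "INTERVAL", 0,
                "DATE", 2, "TIME", 4,     // date/time types: even keys
                "LONG", 1, "DOUBLE", 3);  // regular numerics: odd keys

        static String classify(String leftT, String rightT) {
            int l = KEYS.getOrDefault(leftT, -1);
            int r = KEYS.getOrDefault(rightT, -1);
            int product = l * r;
            int sum = l + r;
            if (sum < 0)                     // per the comment's rule: unsupported operand(s)
                return "unsupported";
            if (product == 0)                // an INTERVAL (key 0) is involved
                return (sum % 2 == 0)
                        ? "interval with interval/date/time: only + or - legal"
                        : "interval scaled by numeric: only * or / legal";
            if (product > 0)                 // both sides supported
                return (product % 2 == 1)
                        ? "numeric op numeric"
                        : "date/time pair: only the DATE - DATE style minus is legal";
            return "one side unsupported";   // product < 0
        }
    }

For example, DATE (key 2) with INTERVAL (key 0) gives product 0 and even sum 2, so only + or - is legal, matching the DATE + INTERVAL => DATE rule listed in the comment.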
<<<<<<< FlattenState fleft = flatten(left); FlattenState fright = flatten(right); ======= FlattenState fleft = flatten(left, branch); FlattenState fright = flatten(right, branch); if (fleft == null) { return fright; } else if (fright == null) { return fleft; } int flags = DEFAULT; switch (jjoin.getJoinType()) { case LEFT: flags = LEFT_JOIN; break; case RIGHT: flags = RIGHT_JOIN; break; } >>>>>>> FlattenState fleft = flatten(left, branch); FlattenState fright = flatten(right, branch); if (fleft == null) { return fright; } else if (fright == null) { return fleft; }
<<<<<<< ======= private List<IndexColumn> keyColumns; >>>>>>> <<<<<<< // It really is an IndexDef, but declaring it that way creates trouble for AIS. We don't want to pull in // all the RowDef stuff and have it visible to GWT. private /* IndexDef */ Object indexDef; private IndexRowComposition indexRowComposition; private IndexToHKey indexToHKey; private boolean isHKeyEquivalent; ======= private transient JoinType joinType; private transient boolean isValid; private transient IndexDef indexDef; private transient IndexRowComposition indexRowComposition; private transient IndexToHKey indexToHKey; private transient boolean isHKeyEquivalent; private transient List<IndexColumn> valueColumns; >>>>>>> private IndexDef indexDef; private IndexRowComposition indexRowComposition; private IndexToHKey indexToHKey; private boolean isHKeyEquivalent; private List<IndexColumn> keyColumns; private List<IndexColumn> valueColumns;
<<<<<<< import com.akiban.ais.model.*; ======= import static com.akiban.sql.optimizer.rule.BranchJoiner_CBO.*; import com.akiban.sql.optimizer.rule.costmodel.CostModel; import com.akiban.sql.optimizer.rule.costmodel.TableRowCounts; >>>>>>> import com.akiban.ais.model.*; import com.akiban.sql.optimizer.rule.costmodel.CostModel; import com.akiban.sql.optimizer.rule.costmodel.TableRowCounts; <<<<<<< ======= import com.akiban.ais.model.Group; import com.akiban.ais.model.Index; import com.akiban.ais.model.Join; import com.akiban.ais.model.Table; import com.akiban.ais.model.UserTable; import com.akiban.qp.rowtype.Schema; import com.akiban.qp.rowtype.UserTableRowType; >>>>>>> import com.akiban.qp.rowtype.Schema; import com.akiban.qp.rowtype.UserTableRowType; <<<<<<< public abstract long getTableRowCount(Table table); public abstract IndexStatistics getIndexStatistics(Index index); public abstract IndexStatistics[] getIndexColumnStatistics(Index index); ======= protected CostEstimator(SchemaRulesContext rulesContext) { this(rulesContext.getSchema()); } >>>>>>> protected CostEstimator(SchemaRulesContext rulesContext) { this(rulesContext.getSchema()); } <<<<<<< return keyCopy(); } protected boolean isConstant(ExpressionNode node) { return node instanceof ConstantExpression; ======= else if (upper) { key.append(Key.AFTER); } byte[] keyBytes = new byte[key.getEncodedSize()]; System.arraycopy(key.getEncodedBytes(), 0, keyBytes, 0, keyBytes.length); return keyBytes; >>>>>>> else if (upper) { key.append(Key.AFTER); } return keyCopy(); } protected boolean isConstant(ExpressionNode node) { return node instanceof ConstantExpression;
<<<<<<< public long sequenceNextValue(TableName sequenceName) { throw new UnsupportedOperationException(); } @Override public long sequenceCurrentValue(TableName sequenceName) { throw new UnsupportedOperationException(); } @Override public long hash(ValueSource valueSource, AkCollator collator) { throw new UnsupportedOperationException(); } @Override ======= >>>>>>> public long sequenceNextValue(TableName sequenceName) { throw new UnsupportedOperationException(); } @Override public long sequenceCurrentValue(TableName sequenceName) { throw new UnsupportedOperationException(); } @Override
<<<<<<< ======= boolean sameState(String pattern, char escape); /** * * @param str * @param count * @return * <p> a negative value if the pattern is not in <code>str</code></p> * <p> a positive number indicating the index at which the pattern/substring is found</p> * * Note: Dependent upon the implementation, it's not guaranteed that * the positive number returned by this function is always the index position. * The positive value could simply be used as an indication that a match has been found */ >>>>>>> boolean sameState(String pattern, char escape); /** * * @param str * @param count * @return * <p> a negative value if the pattern is not in <code>str</code></p> * <p> a positive number indicating the index at which the pattern/substring is found</p> * * Note: Dependent upon the implementation, it's not guaranteed that * the positive number returned by this function is always the index position. * The positive value could simply be used as an indication that a match has been found */
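Note: the Javadoc added in the row above only promises the sign of the result, not that a positive value is a precise offset. A hedged usage sketch; the matcher variable and the match(str, count) signature are assumptions inferred from the @param names:

    // Per the documented contract: a negative result means the pattern is not
    // in str; a non-negative result means a match exists, but is only a
    // reliable index if the particular implementation guarantees that.
    int hit = matcher.match(str, 1);
    if (hit < 0) {
        // no match
    } else {
        // match found; do not rely on 'hit' as an exact position
    }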
<<<<<<< import com.persistit.Key; import com.persistit.Transaction; ======= >>>>>>> import com.persistit.Key; <<<<<<< cursor = new PersistitIndexCursor(this, schema.indexRowType(index), keyRange, ordering, selector); } catch (PersistitInterruptedException e) { throw new QueryCanceledException(); ======= cursor = new PersistitIndexCursor(this, schema.indexRowType(index), reverse, keyRange, selector); >>>>>>> cursor = new PersistitIndexCursor(this, schema.indexRowType(index), keyRange, ordering, selector); <<<<<<< public Key newKey() { return new Key(persistit.getDb()); } private Exchange transact(Exchange exchange) ======= public void handlePersistitException(PersistitException e) >>>>>>> public Key newKey() { return new Key(persistit.getDb()); } public void handlePersistitException(PersistitException e)
<<<<<<< Service<PersistitService> persistitService(); Service<SchemaManager> schemaManager(); Service<Store> storeService(); ======= Service memcacheService(); >>>>>>> Service<PersistitService> persistitService(); Service<SchemaManager> schemaManager(); Service<Store> storeService(); Service memcacheService();
<<<<<<< import com.akiban.qp.operator.StoreAdapter; import com.akiban.qp.persistitadapter.PersistitAdapter; ======= import com.akiban.qp.operator.StoreAdapter; >>>>>>> import com.akiban.qp.operator.StoreAdapter; <<<<<<< import com.persistit.Value; import com.persistit.exception.PersistitException; ======= >>>>>>> import com.persistit.Value; <<<<<<< public void copyFromExchange(Exchange ex) ======= public void copyFromExchange(Exchange exchange) >>>>>>> public void copyFromExchange(Exchange ex)
<<<<<<< import com.akiban.util.tap.Tap; ======= import com.akiban.sql.server.ServerParameterDecoder; import com.akiban.util.tap.InOutTap; >>>>>>> import com.akiban.util.tap.InOutTap; <<<<<<< protected abstract Tap.InOutTap executeTap(); protected abstract Tap.InOutTap acquireLockTap(); ======= protected Bindings getBindings() { return new ArrayBindings(0); } protected int getNParameters() { if (parameterTypes == null) return 0; else return parameterTypes.length; } protected Bindings getParameterBindings(Object[] parameters) { ServerParameterDecoder decoder = new ServerParameterDecoder(); ArrayBindings bindings = new ArrayBindings(parameters.length); for (int i = 0; i < parameters.length; i++) { PostgresType pgType = (parameterTypes == null) ? null : parameterTypes[i]; bindings.set(i, decoder.decodeParameter(parameters[i], pgType)); } return bindings; } protected void setEnvironmentBindings(PostgresServerSession session, Bindings bindings) { if (environmentSettings != null) { int position = getNParameters(); for (EnvironmentExpressionSetting environmentSetting : environmentSettings) { bindings.set(position++, session.getEnvironmentValue(environmentSetting)); } } } protected abstract InOutTap executeTap(); protected abstract InOutTap acquireLockTap(); >>>>>>> protected abstract InOutTap executeTap(); protected abstract InOutTap acquireLockTap();
<<<<<<< import com.akiban.server.RowDef; ======= import com.akiban.server.InvalidOperationException; import com.akiban.server.rowdata.RowDef; >>>>>>> import com.akiban.server.rowdata.RowDef;
<<<<<<< lock(session); try { switch (ddl.getNodeType()) { case NodeTypes.CREATE_SCHEMA_NODE: SchemaDDL.createSchema(ais, schema, (CreateSchemaNode)ddl); break; case NodeTypes.DROP_SCHEMA_NODE: SchemaDDL.dropSchema(ddlFunctions, session, (DropSchemaNode)ddl); break; case NodeTypes.CREATE_TABLE_NODE: TableDDL.createTable(ddlFunctions, session, schema, (CreateTableNode)ddl); break; case NodeTypes.DROP_TABLE_NODE: TableDDL.dropTable(ddlFunctions, session, schema, (DropTableNode)ddl); break; case NodeTypes.CREATE_VIEW_NODE: // TODO: Need to store persistently in AIS (or its extension). try { ((AISBinder)server.getAttribute("aisBinder")).addView(new ViewDefinition(ddl, server.getParser())); } catch (StandardException ex) { throw new ParseException ("", ex.getMessage(), ddl.toString()); } break; case NodeTypes.DROP_VIEW_NODE: ((AISBinder)server.getAttribute("aisBinder")).removeView(((DropViewNode)ddl).getObjectName()); break; case NodeTypes.CREATE_INDEX_NODE: IndexDDL.createIndex(ddlFunctions, session, schema, (CreateIndexNode)ddl); break; case NodeTypes.DROP_INDEX_NODE: IndexDDL.dropIndex(ddlFunctions, session, schema, (DropIndexNode)ddl); case NodeTypes.ALTER_TABLE_NODE: AlterTableDDL.alterTable(ddlFunctions, session, schema, (AlterTableNode)ddl); break; case NodeTypes.RENAME_NODE: if (((RenameNode)ddl).getRenameType() == RenameNode.RenameType.INDEX) { IndexDDL.renameIndex(ddlFunctions, session, schema, (RenameNode)ddl); } else if (((RenameNode)ddl).getRenameType() == RenameNode.RenameType.TABLE) { TableDDL.renameTable(ddlFunctions, session, schema, (RenameNode)ddl); } case NodeTypes.REVOKE_NODE: default: throw new UnsupportedSQLException (ddl.statementToString(), ddl); ======= switch (ddl.getNodeType()) { case NodeTypes.CREATE_SCHEMA_NODE: SchemaDDL.createSchema(ais, schema, (CreateSchemaNode)ddl); break; case NodeTypes.DROP_SCHEMA_NODE: SchemaDDL.dropSchema(ddlFunctions, session, (DropSchemaNode)ddl); break; case NodeTypes.CREATE_TABLE_NODE: TableDDL.createTable(ddlFunctions, session, schema, (CreateTableNode)ddl); break; case NodeTypes.DROP_TABLE_NODE: TableDDL.dropTable(ddlFunctions, session, schema, (DropTableNode)ddl); break; case NodeTypes.CREATE_VIEW_NODE: // TODO: Need to store persistently in AIS (or its extension). 
try { ((AISBinder)server.getAttribute("aisBinder")).addView(new ViewDefinition(ddl, server.getParser())); } catch (StandardException ex) { throw new ParseException ("", ex.getMessage(), ddl.toString()); } break; case NodeTypes.DROP_VIEW_NODE: ((AISBinder)server.getAttribute("aisBinder")).removeView(((DropViewNode)ddl).getObjectName()); break; case NodeTypes.CREATE_INDEX_NODE: IndexDDL.createIndex(ddlFunctions, session, schema, (CreateIndexNode)ddl); break; case NodeTypes.DROP_INDEX_NODE: IndexDDL.dropIndex(ddlFunctions, session, schema, (DropIndexNode)ddl); break; case NodeTypes.ALTER_TABLE_NODE: AlterTableDDL.alterTable(ddlFunctions, session, schema, (AlterTableNode)ddl); break; case NodeTypes.RENAME_NODE: if (((RenameNode)ddl).getRenameType() == RenameNode.RenameType.INDEX) { IndexDDL.renameIndex(ddlFunctions, session, schema, (RenameNode)ddl); } else if (((RenameNode)ddl).getRenameType() == RenameNode.RenameType.TABLE) { TableDDL.renameTable(ddlFunctions, session, schema, (RenameNode)ddl); >>>>>>> lock(session); try { switch (ddl.getNodeType()) { case NodeTypes.CREATE_SCHEMA_NODE: SchemaDDL.createSchema(ais, schema, (CreateSchemaNode)ddl); break; case NodeTypes.DROP_SCHEMA_NODE: SchemaDDL.dropSchema(ddlFunctions, session, (DropSchemaNode)ddl); break; case NodeTypes.CREATE_TABLE_NODE: TableDDL.createTable(ddlFunctions, session, schema, (CreateTableNode)ddl); break; case NodeTypes.DROP_TABLE_NODE: TableDDL.dropTable(ddlFunctions, session, schema, (DropTableNode)ddl); break; case NodeTypes.CREATE_VIEW_NODE: // TODO: Need to store persistently in AIS (or its extension). try { ((AISBinder)server.getAttribute("aisBinder")).addView(new ViewDefinition(ddl, server.getParser())); } catch (StandardException ex) { throw new ParseException ("", ex.getMessage(), ddl.toString()); } break; case NodeTypes.DROP_VIEW_NODE: ((AISBinder)server.getAttribute("aisBinder")).removeView(((DropViewNode)ddl).getObjectName()); break; case NodeTypes.CREATE_INDEX_NODE: IndexDDL.createIndex(ddlFunctions, session, schema, (CreateIndexNode)ddl); break; case NodeTypes.DROP_INDEX_NODE: IndexDDL.dropIndex(ddlFunctions, session, schema, (DropIndexNode)ddl); break; case NodeTypes.ALTER_TABLE_NODE: AlterTableDDL.alterTable(ddlFunctions, session, schema, (AlterTableNode)ddl); break; case NodeTypes.RENAME_NODE: if (((RenameNode)ddl).getRenameType() == RenameNode.RenameType.INDEX) { IndexDDL.renameIndex(ddlFunctions, session, schema, (RenameNode)ddl); } else if (((RenameNode)ddl).getRenameType() == RenameNode.RenameType.TABLE) { TableDDL.renameTable(ddlFunctions, session, schema, (RenameNode)ddl); } break; case NodeTypes.REVOKE_NODE: default: throw new UnsupportedSQLException (ddl.statementToString(), ddl);
<<<<<<< import java.util.ArrayList; import java.util.Collection; ======= import java.io.Writer; import java.security.NoSuchAlgorithmException; >>>>>>> import java.util.ArrayList; import java.util.Collection; import java.io.Writer; import java.security.NoSuchAlgorithmException;
<<<<<<< return new TestOperatorCompiler(parser, ais, defaultSchemaName, functionsRegistry, costEstimator); ======= return new TestOperatorCompiler(parser, properties, ais, defaultSchemaName, functionsRegistry, indexEstimator); >>>>>>> return new TestOperatorCompiler(parser, properties, ais, defaultSchemaName, functionsRegistry, costEstimator); <<<<<<< CostEstimator costEstimator) { super(parser, ais, defaultSchemaName, functionsRegistry, costEstimator); ======= IndexEstimator indexEstimator) { super(parser, properties, ais, defaultSchemaName, functionsRegistry, indexEstimator); >>>>>>> CostEstimator costEstimator) { super(parser, properties, ais, defaultSchemaName, functionsRegistry, costEstimator);
<<<<<<< protected RowStream assembleUsingBloomFilter(UsingBloomFilter usingBloomFilter) { BloomFilter bloomFilter = usingBloomFilter.getBloomFilter(); int pos = pushHashTable(bloomFilter); RowStream lstream = assembleStream(usingBloomFilter.getLoader()); RowStream stream = assembleStream(usingBloomFilter.getInput()); stream.operator = API.using_BloomFilter(lstream.operator, lstream.rowType, bloomFilter.getEstimatedSize(), pos, stream.operator); popHashTable(bloomFilter); return stream; } protected RowStream assembleBloomFilterFilter(BloomFilterFilter bloomFilterFilter) { BloomFilter bloomFilter = bloomFilterFilter.getBloomFilter(); int pos = getHashTablePosition(bloomFilter); RowStream stream = assembleStream(bloomFilterFilter.getInput()); boundRows.set(pos, stream.fieldOffsets); RowStream cstream = assembleStream(bloomFilterFilter.getCheck()); boundRows.set(pos, null); List<Expression> fields = assembleExpressions(bloomFilterFilter.getLookupExpressions(), stream.fieldOffsets); stream.operator = API.select_BloomFilter(stream.operator, cstream.operator, fields, pos); return stream; } ======= protected RowStream assembleOnlyIfEmpty(OnlyIfEmpty onlyIfEmpty) { RowStream stream = assembleStream(onlyIfEmpty.getInput()); stream.operator = API.limit_Default(stream.operator, 0, false, 1, false); // Nulls here have no semantic meaning, but they're easier than trying to // figure out an interesting non-null value for each // AkType in the row. All that really matters is that the // row is there. Expression[] nulls = new Expression[stream.rowType.nFields()]; Arrays.fill(nulls, LiteralExpression.forNull()); stream.operator = API.ifEmpty_Default(stream.operator, stream.rowType, Arrays.asList(nulls), API.InputPreservationOption.DISCARD_INPUT); return stream; } >>>>>>> protected RowStream assembleOnlyIfEmpty(OnlyIfEmpty onlyIfEmpty) { RowStream stream = assembleStream(onlyIfEmpty.getInput()); stream.operator = API.limit_Default(stream.operator, 0, false, 1, false); // Nulls here have no semantic meaning, but they're easier than trying to // figure out an interesting non-null value for each // AkType in the row. All that really matters is that the // row is there. Expression[] nulls = new Expression[stream.rowType.nFields()]; Arrays.fill(nulls, LiteralExpression.forNull()); stream.operator = API.ifEmpty_Default(stream.operator, stream.rowType, Arrays.asList(nulls), API.InputPreservationOption.DISCARD_INPUT); return stream; } protected RowStream assembleUsingBloomFilter(UsingBloomFilter usingBloomFilter) { BloomFilter bloomFilter = usingBloomFilter.getBloomFilter(); int pos = pushHashTable(bloomFilter); RowStream lstream = assembleStream(usingBloomFilter.getLoader()); RowStream stream = assembleStream(usingBloomFilter.getInput()); stream.operator = API.using_BloomFilter(lstream.operator, lstream.rowType, bloomFilter.getEstimatedSize(), pos, stream.operator); popHashTable(bloomFilter); return stream; } protected RowStream assembleBloomFilterFilter(BloomFilterFilter bloomFilterFilter) { BloomFilter bloomFilter = bloomFilterFilter.getBloomFilter(); int pos = getHashTablePosition(bloomFilter); RowStream stream = assembleStream(bloomFilterFilter.getInput()); boundRows.set(pos, stream.fieldOffsets); RowStream cstream = assembleStream(bloomFilterFilter.getCheck()); boundRows.set(pos, null); List<Expression> fields = assembleExpressions(bloomFilterFilter.getLookupExpressions(), stream.fieldOffsets); stream.operator = API.select_BloomFilter(stream.operator, cstream.operator, fields, pos); return stream; }
<<<<<<< import com.akiban.server.service.dxl.DXLReadWriteLockHook; import com.akiban.server.service.session.Session; ======= import com.akiban.qp.operator.*; >>>>>>> import com.akiban.server.service.dxl.DXLReadWriteLockHook; import com.akiban.server.service.session.Session; import com.akiban.qp.operator.*; <<<<<<< private static final Tap.InOutTap EXECUTE_TAP = Tap.createTimer("PostgresBaseStatement: execute shared"); private static final Tap.InOutTap ACQUIRE_LOCK_TAP = Tap.createTimer("PostgresBaseStatement: acquire shared lock"); public PostgresOperatorStatement(PhysicalOperator resultOperator, ======= public PostgresOperatorStatement(Operator resultOperator, >>>>>>> private static final Tap.InOutTap EXECUTE_TAP = Tap.createTimer("PostgresBaseStatement: execute shared"); private static final Tap.InOutTap ACQUIRE_LOCK_TAP = Tap.createTimer("PostgresBaseStatement: acquire shared lock"); public PostgresOperatorStatement(Operator resultOperator, <<<<<<< @Override protected Tap.InOutTap executeTap() { return EXECUTE_TAP; } @Override protected Tap.InOutTap acquireLockTap() { return ACQUIRE_LOCK_TAP; } protected Bindings getBindings() { return UndefBindings.only(); } ======= >>>>>>> @Override protected Tap.InOutTap executeTap() { return EXECUTE_TAP; } @Override protected Tap.InOutTap acquireLockTap() { return ACQUIRE_LOCK_TAP; }
<<<<<<< private DataTypeDescriptor sqlType; ======= private TInstance tInstance; >>>>>>> private DataTypeDescriptor sqlType; <<<<<<< public JsonResultColumn(String name, DataTypeDescriptor sqlType, AkType akType, PostgresType pgType, ======= public JsonResultColumn(String name, AkType akType, TInstance tInstance, >>>>>>> public JsonResultColumn(String name, DataTypeDescriptor sqlType, AkType akType, TInstance tInstance, PostgresType pgType, <<<<<<< this.pgType = pgType; ======= this.tInstance = tInstance; >>>>>>> this.tInstance = tInstance; this.pgType = pgType; <<<<<<< public PostgresType getPostgresType() { return pgType; } ======= public TInstance getTInstance() { return tInstance; } >>>>>>> public TInstance getTInstance() { return tInstance; } public PostgresType getPostgresType() { return pgType; } <<<<<<< if (sqlType != null) pgType = PostgresType.fromDerby(sqlType, null); } return new JsonResultColumn(name, sqlType, akType, pgType, nestedResultColumns); ======= return new JsonResultColumn(name, akType, tInstance, nestedResultColumns); >>>>>>> if (sqlType != null) pgType = PostgresType.fromDerby(sqlType, null); } return new JsonResultColumn(name, sqlType, akType, tInstance, pgType, nestedResultColumns);
<<<<<<< @Override public Explainer getExplainer() { Attributes att = new Attributes(); att.put(Label.NAME, PrimitiveExplainer.getInstance("UNION ALL")); for (Operator op : inputs) att.put(Label.INPUT_OPERATOR, op.getExplainer()); for (RowType type : inputTypes) att.put(Label.INPUT_TYPE, PrimitiveExplainer.getInstance(type)); att.put(Label.OUTPUT_TYPE, PrimitiveExplainer.getInstance(outputRowType)); return new OperationExplainer(Type.UNION_ALL, att); } private static final class Execution implements Cursor { ======= private class Execution extends OperatorExecutionBase implements Cursor { >>>>>>> @Override public Explainer getExplainer() { Attributes att = new Attributes(); att.put(Label.NAME, PrimitiveExplainer.getInstance("UNION ALL")); for (Operator op : inputs) att.put(Label.INPUT_OPERATOR, op.getExplainer()); for (RowType type : inputTypes) att.put(Label.INPUT_TYPE, PrimitiveExplainer.getInstance(type)); att.put(Label.OUTPUT_TYPE, PrimitiveExplainer.getInstance(outputRowType)); return new OperationExplainer(Type.UNION_ALL, att); } private class Execution extends OperatorExecutionBase implements Cursor {
<<<<<<< private BigDecimal value; ======= public MBigDecimalWrapper(String num) { value = new BigDecimal(num); } public MBigDecimalWrapper() { value = BigDecimal.ZERO; } >>>>>>> private BigDecimal value; public MBigDecimalWrapper(String num) { value = new BigDecimal(num); } public MBigDecimalWrapper() { value = BigDecimal.ZERO; } <<<<<<< @Override public BigDecimalWrapper abs() { value = value.abs(); return this; } ======= @Override public int getScale() { return value.scale(); } @Override public int getPrecision() { return value.precision(); } @Override public BigDecimalWrapper parseString(String num) { value = new BigDecimal (num); return this; } @Override public int compareTo(Object o) { if (o == null) return 1; return value.compareTo(((MBigDecimalWrapper)o).value); } @Override public BigDecimalWrapper round(int precision, int scale) { value = value.round(new MathContext(precision, RoundingMode.HALF_UP)); return this; } @Override public BigDecimalWrapper negate() { value = value.negate(); return this; } @Override public BigDecimalWrapper abs() { value = value.abs(); return this; } >>>>>>> @Override public BigDecimalWrapper abs() { value = value.abs(); return this; } public int getScale() { return value.scale(); } @Override public int getPrecision() { return value.precision(); } @Override public BigDecimalWrapper parseString(String num) { value = new BigDecimal (num); return this; } @Override public int compareTo(Object o) { if (o == null) return 1; return value.compareTo(((MBigDecimalWrapper)o).value); } @Override public BigDecimalWrapper round(int precision, int scale) { value = value.round(new MathContext(precision, RoundingMode.HALF_UP)); return this; } @Override public BigDecimalWrapper negate() { value = value.negate(); return this; }
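Note that the round(precision, scale) implementation kept in the resolution above applies only new MathContext(precision, RoundingMode.HALF_UP), which rounds to a count of significant digits; the scale argument is ignored. A short self-contained sketch (the DecimalRounding class is hypothetical) contrasts precision-based rounding with the JDK's scale-based rounding:

    import java.math.BigDecimal;
    import java.math.MathContext;
    import java.math.RoundingMode;

    public class DecimalRounding {
        public static void main(String[] args) {
            BigDecimal v = new BigDecimal("123.4567");
            // MathContext rounds to 4 significant digits in total.
            System.out.println(v.round(new MathContext(4, RoundingMode.HALF_UP))); // 123.5
            // setScale rounds to 2 digits after the decimal point.
            System.out.println(v.setScale(2, RoundingMode.HALF_UP));               // 123.46
        }
    }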
<<<<<<< import com.akiban.server.error.CursorCloseBadException; import com.akiban.server.error.CursorIsUnknownException; import com.akiban.server.error.DisplayFilterSetException; import com.akiban.server.error.DuplicateKeyException; import com.akiban.server.error.InvalidOperationException; import com.akiban.server.error.NoSuchRowException; import com.akiban.server.error.PersistItErrorException; import com.akiban.server.error.RowDataCorruptionException; import com.akiban.server.error.ScanRetryAbandonedException; ======= >>>>>>> import com.akiban.server.error.CursorCloseBadException; import com.akiban.server.error.CursorIsUnknownException; import com.akiban.server.error.DisplayFilterSetException; import com.akiban.server.error.DuplicateKeyException; import com.akiban.server.error.InvalidOperationException; import com.akiban.server.error.NoSuchRowException; import com.akiban.server.error.PersistItErrorException; import com.akiban.server.error.RowDataCorruptionException; import com.akiban.server.error.ScanRetryAbandonedException; <<<<<<< public Exchange getExchange(final Session session, final Index index) { return treeService.getExchange(session, (IndexDef)index.indexDef()); ======= public Exchange getExchange(final Session session, final Index index) throws PersistitException { return treeService.getExchange(session, (IndexDef) index.indexDef()); >>>>>>> public Exchange getExchange(final Session session, final Index index) { return treeService.getExchange(session, (IndexDef)index.indexDef()); <<<<<<< int[] nKeyColumns, FieldDef[] hKeyFieldDefs, Object[] hKeyValues) { ======= int[] nKeyColumns, FieldDef[] hKeyFieldDefs, Object[] hKeyValues) throws Exception { PersistitKeyAppender appender = new PersistitKeyAppender(hEx.getKey()); >>>>>>> int[] nKeyColumns, FieldDef[] hKeyFieldDefs, Object[] hKeyValues) { PersistitKeyAppender appender = new PersistitKeyAppender(hEx.getKey()); <<<<<<< public static void constructIndexKey(Key iKey, RowData rowData, Index index, Key hKey) { ======= public static void constructIndexKey(PersistitKeyAppender iKeyAppender, RowData rowData, Index index, Key hKey) throws PersistitException { >>>>>>> public static void constructIndexKey(PersistitKeyAppender iKeyAppender, RowData rowData, Index index, Key hKey) {
<<<<<<< import com.akiban.server.RowData; import com.akiban.server.error.TableDefinitionMismatchException; ======= import com.akiban.server.rowdata.RowData; import com.akiban.server.api.dml.TableDefinitionMismatchException; >>>>>>> import com.akiban.server.error.TableDefinitionMismatchException; import com.akiban.server.rowdata.RowData;
<<<<<<< CsvRowReader reader = new CsvRowReader(toTable, toColumns, format, context); ======= DMLFunctions dml = dxlService.dmlFunctions(); CsvRowReader reader = new CsvRowReader(toTable, toColumns, inputStream, format, context); >>>>>>> CsvRowReader reader = new CsvRowReader(toTable, toColumns, inputStream, format, context); <<<<<<< reader.skipRows(inputStream, skipRows); return loadTableFromRowReader(session, inputStream, reader, commitFrequency); } @Override public long loadTableFromMysqlDump(Session session, InputStream inputStream, String encoding, UserTable toTable, List<Column> toColumns, long commitFrequency, QueryContext context) throws IOException { MysqlDumpRowReader reader = new MysqlDumpRowReader(toTable, toColumns, encoding, context); return loadTableFromRowReader(session, inputStream, reader, commitFrequency); } protected long loadTableFromRowReader(Session session, InputStream inputStream, RowReader reader, long commitFrequency) throws IOException { DMLFunctions dml = dxlService.dmlFunctions(); ======= reader.skipRows(skipRows); >>>>>>> reader.skipRows(skipRows); return loadTableFromRowReader(session, inputStream, reader, commitFrequency); } @Override public long loadTableFromMysqlDump(Session session, InputStream inputStream, String encoding, UserTable toTable, List<Column> toColumns, long commitFrequency, QueryContext context) throws IOException { MysqlDumpRowReader reader = new MysqlDumpRowReader(toTable, toColumns, inputStream, encoding, context); return loadTableFromRowReader(session, inputStream, reader, commitFrequency); } protected long loadTableFromRowReader(Session session, InputStream inputStream, RowReader reader, long commitFrequency) throws IOException { DMLFunctions dml = dxlService.dmlFunctions();
<<<<<<< import com.akiban.ais.model.AkibanInformationSchema; ======= import com.akiban.server.AkServerInterface; >>>>>>> import com.akiban.ais.model.AkibanInformationSchema; import com.akiban.server.AkServerInterface;
<<<<<<< ======= import com.akiban.server.types.conversion.Converters; import com.akiban.server.types3.TInstance; import com.akiban.server.types3.Types3Switch; import com.akiban.server.types3.pvalue.PValueSource; >>>>>>> import com.akiban.server.types.conversion.Converters; import com.akiban.server.types3.TInstance; import com.akiban.server.types3.Types3Switch; import com.akiban.server.types3.pvalue.PValueSource; <<<<<<< int flattenedIndex = irc.getFieldPosition(i); Column column = groupIndex.getColumnForFlattenedRow(flattenedIndex); ValueSource source = row.eval(flattenedIndex); indexRow.append(column, source); ======= final int flattenedIndex = irc.getFieldPosition(i); if (Types3Switch.ON) { PValueSource source = row.pvalue(flattenedIndex); TInstance sourceInstance = row.rowType().typeInstanceAt(flattenedIndex); sourceInstance.writeCollating(source, pTarget); } else { Column column = groupIndex.getColumnForFlattenedRow(flattenedIndex); ValueSource source = row.eval(flattenedIndex); Converters.convert(source, target.expectingType(column)); } >>>>>>> int flattenedIndex = irc.getFieldPosition(i); if (Types3Switch.ON) { PValueSource source = row.pvalue(flattenedIndex); TInstance sourceInstance = row.rowType().typeInstanceAt(flattenedIndex); sourceInstance.writeCollating(source, pTarget); } else { Column column = groupIndex.getColumnForFlattenedRow(flattenedIndex); ValueSource source = row.eval(flattenedIndex); indexRow.append(column, source); }
<<<<<<< import com.akiban.server.FieldDef; import com.akiban.server.RowDef; ======= import com.akiban.message.ErrorCode; import com.akiban.server.rowdata.FieldDef; import com.akiban.server.InvalidOperationException; import com.akiban.server.rowdata.RowDef; >>>>>>> import com.akiban.server.rowdata.FieldDef; import com.akiban.server.rowdata.RowDef;
<<<<<<< import com.fasterxml.jackson.databind.JsonNode; ======= import org.codehaus.jackson.JsonNode; import org.joda.time.format.ISODateTimeFormat; >>>>>>> import com.fasterxml.jackson.databind.JsonNode; import org.joda.time.format.ISODateTimeFormat; <<<<<<< Iterator<Entry<String,JsonNode>> i = node.fields(); ======= if(alwaysWithPK) { addPK(table); columnsAdded = true; } Iterator<Entry<String,JsonNode>> i = node.getFields(); >>>>>>> if(alwaysWithPK) { addPK(table); columnsAdded = true; } Iterator<Entry<String,JsonNode>> i = node.fields(); <<<<<<< String columnName = "_" + tableName.getTableName() + "_id"; i = node.fields(); ======= i = node.getFields(); >>>>>>> i = node.fields();
<<<<<<< import com.akiban.rest.RestFunctionInvoker; import com.akiban.rest.RestFunctionRegistrar; ======= import com.akiban.server.service.session.Session; >>>>>>> import com.akiban.rest.RestFunctionInvoker; import com.akiban.rest.RestFunctionRegistrar; import com.akiban.server.service.session.Session;
<<<<<<< import com.foundationdb.server.service.functions.FunctionsRegistry; import com.foundationdb.server.service.metrics.MetricsService; ======= import com.foundationdb.server.expressions.TypesRegistryService; >>>>>>> import com.foundationdb.server.service.metrics.MetricsService; import com.foundationdb.server.expressions.TypesRegistryService;
<<<<<<< import com.akiban.server.types3.TParsers; ======= import com.akiban.server.types3.common.TFormatter; >>>>>>> import com.akiban.server.types3.TParsers; import com.akiban.server.types3.common.TFormatter; <<<<<<< public static final NoAttrTClass INSTANCE = new NoAttrTClass(AkBundle.INSTANCE.id(), "boolean", 1, 1, 1, PUnderlying.BOOL, TParsers.BOOLEAN, TypeId.BOOLEAN_ID); ======= public static final NoAttrTClass INSTANCE = new NoAttrTClass( AkBundle.INSTANCE.id(), "boolean", TFormatter.FORMAT.BOOL, 1, 1, 1, PUnderlying.BOOL, TypeId.BOOLEAN_ID); >>>>>>> public static final NoAttrTClass INSTANCE = new NoAttrTClass(AkBundle.INSTANCE.id(), "boolean", TFormatter.FORMAT.BOOL, 1, 1, 1, PUnderlying.BOOL, TParsers.BOOLEAN, TypeId.BOOLEAN_ID);
<<<<<<< void constructHKey(final Session session, Exchange hEx, RowDef rowDef, RowData rowData) throws PersistitException, InvalidOperationException { ======= void constructHKey(Exchange hEx, RowDef rowDef, RowData rowData, boolean insertingRow) throws PersistitException, InvalidOperationException { >>>>>>> void constructHKey(final Session session, Exchange hEx, RowDef rowDef, RowData rowData, boolean insertingRow) throws PersistitException, InvalidOperationException { <<<<<<< constructHKey(session, hEx, rowDef, rowData); ======= constructHKey(hEx, rowDef, rowData, true); >>>>>>> constructHKey(session, hEx, rowDef, rowData, true); <<<<<<< constructHKey(session, hEx, rowDef, rowData); ======= constructHKey(hEx, rowDef, rowData, false); >>>>>>> constructHKey(session, hEx, rowDef, rowData, false); <<<<<<< final TableStatus ts = rowDef.getTableStatus(); constructHKey(session, hEx, rowDef, oldRowData); ======= final TableStatus ts = checkTableStatus(rowDefId); constructHKey(hEx, rowDef, oldRowData, false); >>>>>>> final TableStatus ts = rowDef.getTableStatus(); constructHKey(session, hEx, rowDef, oldRowData, false);
<<<<<<< import edu.umd.cs.findbugs.annotations.NonNull; ======= import javax.annotation.Nonnull; import javax.annotation.Nullable; >>>>>>> import edu.umd.cs.findbugs.annotations.NonNull; <<<<<<< public String resolve(@NonNull HttpRequest request) { ======= @Nullable public String resolve(@Nonnull HttpRequest request) { >>>>>>> @Nullable public String resolve(@NonNull HttpRequest request) {
<<<<<<< @Requires(property = ConfigurationClient.ENABLED, value = StringUtils.TRUE, defaultValue = StringUtils.FALSE) ======= @Requires(property = ConfigurationClient.ENABLED, value = "true", defaultValue = "false") @BootstrapContextCompatible >>>>>>> @Requires(property = ConfigurationClient.ENABLED, value = StringUtils.TRUE, defaultValue = StringUtils.FALSE) @BootstrapContextCompatible
<<<<<<< import io.micronaut.core.util.StringUtils; ======= import io.micronaut.core.util.ArgumentUtils; >>>>>>> import io.micronaut.core.util.StringUtils; import io.micronaut.core.util.ArgumentUtils;
<<<<<<< import io.micronaut.core.util.clhm.ConcurrentLinkedHashMap; ======= import io.micronaut.core.util.StringUtils; >>>>>>> import io.micronaut.core.util.clhm.ConcurrentLinkedHashMap; import io.micronaut.core.util.StringUtils;
<<<<<<< private final InvocationInstrumenter instrumenter; ======= private final InvocationInstrumenter onSubscribeInstrumenter; private final InvocationInstrumenter onResultInstrumenter; >>>>>>> private final InvocationInstrumenter onSubscribeInstrumenter; private final InvocationInstrumenter onResultInstrumenter; <<<<<<< this.instrumenter = RunOnceInvocationInstrumenter.create(instrumenterFactory); ======= this.onSubscribeInstrumenter = instrumenterFactory.create(); this.onResultInstrumenter = instrumenterFactory.create(); >>>>>>> this.onSubscribeInstrumenter = RunOnceInvocationInstrumenter.create(instrumenterFactory); this.onResultInstrumenter = RunOnceInvocationInstrumenter.create(instrumenterFactory); <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onSubscribeInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onSubscribeInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onSubscribeInstrumenter.beforeInvocation(); source.onSubscribe(d); } finally { onSubscribeInstrumenter.afterInvocation(); } >>>>>>> <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onResultInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onResultInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onResultInstrumenter.beforeInvocation(); source.onError(t); } finally { onResultInstrumenter.afterInvocation(); } >>>>>>> <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onResultInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onResultInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onResultInstrumenter.beforeInvocation(); source.onSuccess(value); } finally { onResultInstrumenter.afterInvocation(); } >>>>>>> <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onResultInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onResultInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onResultInstrumenter.beforeInvocation(); source.onComplete(); } finally { onResultInstrumenter.afterInvocation(); } >>>>>>>
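The resolution above keeps two instrumenter fields but switches every call site from explicit beforeInvocation()/afterInvocation() pairs wrapped in try/finally to a try-with-resources scope. A simplified sketch of that idiom; the Instrumentation and InvocationInstrumenter interfaces here are stand-ins modeled on the names in the record, not Micronaut's actual API:

    // Wrap the before/after hooks in an AutoCloseable so try-with-resources
    // guarantees the "after" call on every exit path.
    interface Instrumentation extends AutoCloseable {
        @Override
        void close(); // narrowed: no checked exception
    }

    interface InvocationInstrumenter {
        void beforeInvocation();
        void afterInvocation();

        // Adapts the before/after pair to a scoped resource.
        default Instrumentation newInstrumentation() {
            beforeInvocation();
            return this::afterInvocation;
        }
    }

    class Demo {
        public static void main(String[] args) {
            InvocationInstrumenter instrumenter = new InvocationInstrumenter() {
                @Override public void beforeInvocation() { System.out.println("before"); }
                @Override public void afterInvocation()  { System.out.println("after"); }
            };
            try (Instrumentation ignored = instrumenter.newInstrumentation()) {
                System.out.println("invocation body");
            } // afterInvocation() runs even if the body throws
        }
    }

The design point is that close() runs on every exit path, so the after hook cannot be skipped by an exception, which is exactly what the deleted try/finally blocks were enforcing by hand.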
<<<<<<< ======= import com.typesafe.netty.HandlerPublisher; import com.typesafe.netty.HandlerSubscriber; import io.netty.channel.ChannelHandlerContext; >>>>>>> <<<<<<< import io.netty.channel.ChannelHandlerContext; ======= import io.netty.channel.ChannelPipeline; >>>>>>> import io.netty.channel.ChannelHandlerContext; import com.typesafe.netty.HandlerPublisher; import io.netty.channel.ChannelPipeline;
<<<<<<< ======= import io.micronaut.core.convert.exceptions.ConversionErrorException; >>>>>>> <<<<<<< import io.micronaut.http.client.multipart.MultipartBody; import io.micronaut.http.client.ssl.NettyClientSslBuilder; ======= import io.micronaut.http.codec.CodecException; >>>>>>> import io.micronaut.http.codec.CodecException; import io.micronaut.http.client.multipart.MultipartBody; import io.micronaut.http.client.ssl.NettyClientSslBuilder; <<<<<<< NettyRequestWriter requestWriter = prepareRequest(requestWrapper.get(), requestURI); io.netty.handler.codec.http.HttpRequest nettyRequest = requestWriter.getNettyRequest(); ======= io.netty.handler.codec.http.HttpRequest nettyRequest = prepareRequest(channel, requestWrapper.get(), requestURI); >>>>>>> NettyRequestWriter requestWriter = prepareRequest(requestWrapper.get(), requestURI); io.netty.handler.codec.http.HttpRequest nettyRequest = requestWriter.getNettyRequest(); <<<<<<< ======= HttpRequest nettyRequest = buildNettyRequest( channel, finalRequest, requestContentType, permitsBody); >>>>>>> <<<<<<< addFullHttpResponseHandler(request, channel, completableFuture, bodyType); requestWriter.write(channel); closeChannelAsync(channel); ======= addFullHttpResponseHandler(request, channel, emitter, bodyType); writeAndCloseRequest(channel, nettyRequest, emitter); >>>>>>> addFullHttpResponseHandler(request, channel, emitter, bodyType); requestWriter.writeAndClose(channel, emitter); <<<<<<< protected NettyRequestWriter buildNettyRequest( io.micronaut.http.HttpRequest request, ======= protected io.netty.handler.codec.http.HttpRequest buildNettyRequest( Channel channel, io.micronaut.http.HttpRequest request, >>>>>>> protected NettyRequestWriter buildNettyRequest( io.micronaut.http.HttpRequest request, <<<<<<< nettyRequest = clientHttpRequest.getNettyRequest(bodyContent); ======= nettyRequest = clientHttpRequest.getFullRequest(bodyContent); >>>>>>> nettyRequest = clientHttpRequest.getFullRequest(bodyContent); <<<<<<< ======= private ByteBuf charSequenceToByteBuf(CharSequence bodyValue, MediaType requestContentType) { CharSequence charSequence = bodyValue; return byteBufferFactory.copiedBuffer( charSequence.toString().getBytes( requestContentType.getCharset().orElse(defaultCharset) ) ).asNativeBuffer(); } >>>>>>> private ByteBuf charSequenceToByteBuf(CharSequence bodyValue, MediaType requestContentType) { CharSequence charSequence = bodyValue; return byteBufferFactory.copiedBuffer( charSequence.toString().getBytes( requestContentType.getCharset().orElse(defaultCharset) ) ).asNativeBuffer(); } <<<<<<< private <O> void addFullHttpResponseHandler(io.micronaut.http.HttpRequest<?> request, Channel channel, CompletableFuture<io.micronaut.http.HttpResponse<O>> completableFuture, io.micronaut.core.type.Argument<O> bodyType) { ======= private void writeAndCloseRequest(Channel channel, HttpRequest nettyRequest, FlowableEmitter<?> emitter) { channel.writeAndFlush(nettyRequest).addListener(f -> { try { if(!f.isSuccess()) { emitter.onError(f.cause()); } } finally { closeChannelAsync(channel); } }); } private <O> void addFullHttpResponseHandler(io.micronaut.http.HttpRequest<?> request, Channel channel, Emitter<io.micronaut.http.HttpResponse<O>> emitter, io.micronaut.core.type.Argument<O> bodyType) { >>>>>>> private void writeAndCloseRequest(Channel channel, HttpRequest nettyRequest, FlowableEmitter<?> emitter) { channel.writeAndFlush(nettyRequest).addListener(f -> { try { if(!f.isSuccess()) { emitter.onError(f.cause()); } } finally { closeChannelAsync(channel); } }); } 
private <O> void addFullHttpResponseHandler(io.micronaut.http.HttpRequest<?> request, Channel channel, Emitter<io.micronaut.http.HttpResponse<O>> emitter, io.micronaut.core.type.Argument<O> bodyType) { <<<<<<< private HttpPostRequestEncoder buildFormDataRequest(io.netty.handler.codec.http.HttpRequest request, Object bodyValue) throws HttpPostRequestEncoder.ErrorDataEncoderException { HttpPostRequestEncoder postRequestEncoder = new HttpPostRequestEncoder(request, false); ======= private io.netty.handler.codec.http.HttpRequest buildFormDataRequest(NettyClientHttpRequest clientHttpRequest, Object bodyValue) throws HttpPostRequestEncoder.ErrorDataEncoderException { HttpPostRequestEncoder postRequestEncoder = new HttpPostRequestEncoder(clientHttpRequest.getFullRequest(null), false); >>>>>>> private HttpPostRequestEncoder buildFormDataRequest(NettyClientHttpRequest clientHttpRequest, Object bodyValue) throws HttpPostRequestEncoder.ErrorDataEncoderException { HttpPostRequestEncoder postRequestEncoder = new HttpPostRequestEncoder(clientHttpRequest.getFullRequest(null), false); <<<<<<< private <I> NettyRequestWriter prepareRequest(io.micronaut.http.HttpRequest<I> request, URI requestURI) throws HttpPostRequestEncoder.ErrorDataEncoderException { ======= private <I> io.netty.handler.codec.http.HttpRequest prepareRequest(Channel channel, io.micronaut.http.HttpRequest<I> request, URI requestURI) throws HttpPostRequestEncoder.ErrorDataEncoderException { >>>>>>> private <I> NettyRequestWriter prepareRequest(io.micronaut.http.HttpRequest<I> request, URI requestURI) throws HttpPostRequestEncoder.ErrorDataEncoderException { <<<<<<< NettyClientHttpRequest clientHttpRequest = (NettyClientHttpRequest) request; NettyRequestWriter requestWriter = buildNettyRequest(clientHttpRequest, requestContentType, permitsBody); io.netty.handler.codec.http.HttpRequest nettyRequest = requestWriter.getNettyRequest(); ======= io.netty.handler.codec.http.HttpRequest nettyRequest = buildNettyRequest( channel, request, requestContentType, permitsBody); >>>>>>> NettyClientHttpRequest clientHttpRequest = (NettyClientHttpRequest) request; NettyRequestWriter requestWriter = buildNettyRequest(clientHttpRequest, requestContentType, permitsBody); io.netty.handler.codec.http.HttpRequest nettyRequest = requestWriter.getNettyRequest();
<<<<<<< AnnotationMetadata annotationMetadata = DefaultAnnotationMetadata.mutateMember( AnnotationMetadata.EMPTY_METADATA, PropertySource.class.getName(), AnnotationMetadata.VALUE_MEMBER, Collections.singletonList( new io.micronaut.core.annotation.AnnotationValue( Property.class.getName(), Collections.singletonMap( "name", propertyMetadata.getPath() ) ) ) ); boolean requiresReflection = modelUtils.isPrivate(method); if (!requiresReflection && modelUtils.isProtected(method)) { PackageElement declaringPackage = elementUtils.getPackageOf(declaringClass); PackageElement concretePackage = elementUtils.getPackageOf(this.concreteClass); requiresReflection = !declaringPackage.getQualifiedName().equals(concretePackage.getQualifiedName()); } writer.visitSetterValue( modelUtils.resolveTypeReference(declaringClass), annotationMetadata, requiresReflection, fieldType, setterName, genericTypes, annotationUtils.getAnnotationMetadata(method.getParameters().get(0)), true); ======= AnnotationMetadata annotationMetadata = DefaultAnnotationMetadata.mutateMember( AnnotationMetadata.EMPTY_METADATA, PropertySource.class.getName(), AnnotationMetadata.VALUE_MEMBER, Collections.singletonList( new io.micronaut.core.annotation.AnnotationValue( Property.class.getName(), Collections.singletonMap( "name", propertyMetadata.getPath() ) ) ) ); writer.visitSetterValue( modelUtils.resolveTypeReference(declaringClass), annotationMetadata, modelUtils.isPrivate(method), fieldType, setterName, genericTypes, annotationUtils.getAnnotationMetadata(method.getParameters().get(0)), true); } >>>>>>> AnnotationMetadata annotationMetadata = DefaultAnnotationMetadata.mutateMember( AnnotationMetadata.EMPTY_METADATA, PropertySource.class.getName(), AnnotationMetadata.VALUE_MEMBER, Collections.singletonList( new io.micronaut.core.annotation.AnnotationValue( Property.class.getName(), Collections.singletonMap( "name", propertyMetadata.getPath() ) ) ) ); boolean requiresReflection = modelUtils.isPrivate(method); if (!requiresReflection && modelUtils.isProtected(method)) { PackageElement declaringPackage = elementUtils.getPackageOf(declaringClass); PackageElement concretePackage = elementUtils.getPackageOf(this.concreteClass); requiresReflection = !declaringPackage.getQualifiedName().equals(concretePackage.getQualifiedName()); } writer.visitSetterValue( modelUtils.resolveTypeReference(declaringClass), annotationMetadata, requiresReflection, fieldType, setterName, genericTypes, annotationUtils.getAnnotationMetadata(method.getParameters().get(0)), true); }
<<<<<<< @Requires(property = ConfigurationClient.ENABLED, value = StringUtils.TRUE, defaultValue = StringUtils.FALSE) ======= @Requires(property = ConfigurationClient.ENABLED, value = "true", defaultValue = "false") @BootstrapContextCompatible >>>>>>> @Requires(property = ConfigurationClient.ENABLED, value = StringUtils.TRUE, defaultValue = StringUtils.FALSE) @BootstrapContextCompatible
<<<<<<< writer.visitConfigBuilderFieldStart(fieldType, fieldName); ======= String fieldName = field.getSimpleName().toString(); ConfigBuilder configBuilder = new ConfigBuilder(fieldType).forField(fieldName); writer.visitConfigBuilderStart(configBuilder); >>>>>>> ConfigBuilder configBuilder = new ConfigBuilder(fieldType).forField(fieldName); writer.visitConfigBuilderStart(configBuilder);
<<<<<<< if (children == null) { throw new IOException("Cannot list content of directory " + f.getAbsolutePath()); } if (children.length > 0 && !recurse) return false; for (String child : children) { delete(new File(f, child), true); ======= if (children != null) { if (children.length > 0 && !recurse) return false; for (String child : children) { delete(new File(f, child), true); } } else { logger.debug("Unexpected null listing files in {}", f.getAbsolutePath()); >>>>>>> if (children == null) { throw new IOException("Cannot list content of directory " + f.getAbsolutePath()); } if (children.length > 0 && !recurse) return false; for (String child : children) { delete(new File(f, child), true);
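The delete resolution above fails fast when File.list() returns null (an I/O error or a non-directory) and refuses to remove a non-empty directory unless recurse is set. A self-contained sketch under those assumptions (the RecursiveDelete class is hypothetical; the delete signature mirrors the record):

    import java.io.File;
    import java.io.IOException;

    public class RecursiveDelete {
        // Returns false when f is a non-empty directory and recurse is false;
        // children are always deleted recursively once descent begins.
        static boolean delete(File f, boolean recurse) throws IOException {
            if (f.isDirectory()) {
                String[] children = f.list();
                if (children == null) {
                    throw new IOException("Cannot list content of directory " + f.getAbsolutePath());
                }
                if (children.length > 0 && !recurse) {
                    return false;
                }
                for (String child : children) {
                    delete(new File(f, child), true);
                }
            }
            return f.delete();
        }

        public static void main(String[] args) throws IOException {
            File dir = new File(args.length > 0 ? args[0] : "tmp-delete-me");
            System.out.println("deleted: " + delete(dir, true));
        }
    }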
<<<<<<< private final InvocationInstrumenter instrumenter; ======= private final InvocationInstrumenter onSubscribeInstrumenter; private final InvocationInstrumenter onResultInstrumenter; >>>>>>> private final InvocationInstrumenter onSubscribeInstrumenter; private final InvocationInstrumenter onResultInstrumenter; <<<<<<< this.instrumenter = RunOnceInvocationInstrumenter.create(instrumenterFactory); ======= this.onSubscribeInstrumenter = instrumenterFactory.create(); this.onResultInstrumenter = instrumenterFactory.create(); >>>>>>> this.onSubscribeInstrumenter = RunOnceInvocationInstrumenter.create(instrumenterFactory); this.onResultInstrumenter = RunOnceInvocationInstrumenter.create(instrumenterFactory); <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onSubscribeInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onSubscribeInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onSubscribeInstrumenter.beforeInvocation(); source.onSubscribe(d); } finally { onSubscribeInstrumenter.afterInvocation(); } >>>>>>> <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onResultInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onResultInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onResultInstrumenter.beforeInvocation(); source.onNext(t); } finally { onResultInstrumenter.afterInvocation(); } >>>>>>> <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onResultInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onResultInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onResultInstrumenter.beforeInvocation(); source.onError(t); } finally { onResultInstrumenter.afterInvocation(); } >>>>>>> <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onResultInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onResultInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onResultInstrumenter.beforeInvocation(); source.onComplete(); } finally { onResultInstrumenter.afterInvocation(); } >>>>>>>
<<<<<<< /** The general role prefix, to be added to all the LDAP roles that do not start by one of the exclude prefixes */ private String rolePrefix; /** A Set of roles to be added to all the users authenticated using this LDAP instance */ private Set<GrantedAuthority> setExtraRoles = new HashSet<>(); /** A Set of prefixes. When a role starts with any of these, the role prefix defined above will not be prepended */ private Set<String> setExcludePrefixes = new HashSet<>(); ======= /** Opencast's security service */ private SecurityService securityService; >>>>>>> /** Opencast's security service */ private SecurityService securityService; /** The general role prefix, to be added to all the LDAP roles that do not start by one of the exclude prefixes */ private String rolePrefix; /** A Set of roles to be added to all the users authenticated using this LDAP instance */ private Set<GrantedAuthority> setExtraRoles = new HashSet<>(); /** A Set of prefixes. When a role starts with any of these, the role prefix defined above will not be prepended */ private Set<String> setExcludePrefixes = new HashSet<>(); <<<<<<< * @param groupRoleProvider ======= * @param securityService * a reference to Opencast's security service >>>>>>> * @param securityService * a reference to Opencast's security service <<<<<<< String userDn, String password, String roleAttributesGlob, String rolePrefix, String[] extraRoles, String[] excludePrefixes, boolean convertToUppercase, int cacheSize, int cacheExpiration) { ======= String userDn, String password, String roleAttributesGlob, String rolePrefix, int cacheSize, int cacheExpiration, SecurityService securityService) { >>>>>>> String userDn, String password, String roleAttributesGlob, String rolePrefix, String[] extraRoles, String[] excludePrefixes, boolean convertToUppercase, int cacheSize, int cacheExpiration, SecurityService securityService) {
<<<<<<< private final InvocationInstrumenter instrumenter; ======= private final InvocationInstrumenter onSubscribeInstrumenter; private final InvocationInstrumenter onResultInstrumenter; >>>>>>> private final InvocationInstrumenter onSubscribeInstrumenter; private final InvocationInstrumenter onResultInstrumenter; <<<<<<< ======= } else { try { onSubscribeInstrumenter.beforeInvocation(); source.onSubscribe(s); } finally { onSubscribeInstrumenter.afterInvocation(); } >>>>>>> <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onResultInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onResultInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onResultInstrumenter.beforeInvocation(); source.onNext(t); } finally { onResultInstrumenter.afterInvocation(); } >>>>>>> <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onResultInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onResultInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onResultInstrumenter.beforeInvocation(); source.onError(t); } finally { onResultInstrumenter.afterInvocation(); } >>>>>>> <<<<<<< try (Instrumentation ignored = instrumenter.newInstrumentation()) { ======= if (onResultInstrumenter == null) { >>>>>>> try (Instrumentation ignored = onResultInstrumenter.newInstrumentation()) { <<<<<<< ======= } else { try { onResultInstrumenter.beforeInvocation(); source.onComplete(); } finally { onResultInstrumenter.afterInvocation(); } >>>>>>>
<<<<<<< ======= import io.micronaut.context.annotation.BootstrapContextCompatible; import io.micronaut.context.exceptions.ConfigurationException; import io.micronaut.core.annotation.Internal; import io.micronaut.core.convert.ConversionContext; import io.micronaut.core.convert.format.Format; import io.micronaut.core.io.buffer.ByteBuffer; import io.micronaut.http.client.annotation.Client; import io.micronaut.http.codec.CodecConfiguration; >>>>>>>
<<<<<<< ComputePlatform computePlatform = determineCloudProvider(); if (computePlatform != null) { switch (computePlatform) { case GOOGLE_COMPUTE: //instantiate bean for GC metadata discovery environments.add(GOOGLE_COMPUTE); environments.add(Environment.CLOUD); break; case AMAZON_EC2: //instantiate bean for ec2 metadata discovery environments.add(AMAZON_EC2); environments.add(Environment.CLOUD); break; case AZURE: // not yet implemented environments.add(AZURE); environments.add(Environment.CLOUD); break; case IBM: // not yet implemented environments.add(IBM); environments.add(Environment.CLOUD); break; case DIGITAL_OCEAN: environments.add(DIGITAL_OCEAN); environments.add(Environment.CLOUD); break; case OTHER: // do nothing here break; default: // no-op ======= if (deduceComputePlatform) { ComputePlatform computePlatform = determineCloudProvider(); if (computePlatform != null) { switch (computePlatform) { case GOOGLE_COMPUTE: //instantiate bean for GC metadata discovery environments.add(GOOGLE_COMPUTE); environments.add(Environment.CLOUD); break; case AMAZON_EC2: //instantiate bean for ec2 metadata discovery environments.add(AMAZON_EC2); environments.add(Environment.CLOUD); break; case AZURE: // not yet implemented environments.add(AZURE); environments.add(Environment.CLOUD); break; case IBM: // not yet implemented environments.add(IBM); environments.add(Environment.CLOUD); break; case OTHER: // do nothing here break; default: // no-op } >>>>>>> if (deduceComputePlatform) { ComputePlatform computePlatform = determineCloudProvider(); if (computePlatform != null) { switch (computePlatform) { case GOOGLE_COMPUTE: //instantiate bean for GC metadata discovery environments.add(GOOGLE_COMPUTE); environments.add(Environment.CLOUD); break; case AMAZON_EC2: //instantiate bean for ec2 metadata discovery environments.add(AMAZON_EC2); environments.add(Environment.CLOUD); break; case AZURE: // not yet implemented environments.add(AZURE); environments.add(Environment.CLOUD); break; case IBM: // not yet implemented environments.add(IBM); environments.add(Environment.CLOUD); break; case DIGITAL_OCEAN: environments.add(DIGITAL_OCEAN); environments.add(Environment.CLOUD); break; case OTHER: // do nothing here break; default: // no-op }
<<<<<<< super(sourceUnit, annotatedNode, annotationMetadata); this.type = type; ======= super(annotatedNode, annotationMetadata); >>>>>>> super(sourceUnit, annotatedNode, annotationMetadata);
<<<<<<< private Optional<Map<String, Object>> readPropertiesFromLoader(String fileName, String filePath, PropertySourceLoader propertySourceLoader) throws ConfigurationException { ResourceResolver resourceResolver = new ResourceResolver(); Optional<ResourceLoader> resourceLoader = resourceResolver.getSupportingLoader(filePath); ResourceLoader loader = resourceLoader.orElse(FileSystemResourceLoader.defaultLoader()); try { Optional<InputStream> inputStream = loader.getResourceAsStream(filePath); if (inputStream.isPresent()) { return Optional.of(propertySourceLoader.read(fileName, inputStream.get())); } else { return Optional.empty(); ======= private void readPropertySourceFromLoader(String fileName, String filePath, PropertySourceLoader propertySourceLoader, List<PropertySource> propertySources) throws ConfigurationException { if (!this.propertySources.containsKey(filePath)) { ResourceResolver resourceResolver = new ResourceResolver(); Optional<ResourceLoader> resourceLoader = resourceResolver.getSupportingLoader(filePath); ResourceLoader loader = resourceLoader.orElse(FileSystemResourceLoader.defaultLoader()); try { Optional<InputStream> inputStream = loader.getResourceAsStream(filePath); if (inputStream.isPresent()) { Map<String, Object> properties = propertySourceLoader.read(fileName, inputStream.get()); propertySources.add(PropertySource.of(filePath, properties)); } else { if (LOG.isWarnEnabled()) { LOG.warn("Unable to load properties file: {}", fileName); } } } catch (IOException e) { throw new ConfigurationException("Unsupported properties file: " + fileName); >>>>>>> private Optional<Map<String, Object>> readPropertiesFromLoader(String fileName, String filePath, PropertySourceLoader propertySourceLoader) throws ConfigurationException { ResourceResolver resourceResolver = new ResourceResolver(); Optional<ResourceLoader> resourceLoader = resourceResolver.getSupportingLoader(filePath); ResourceLoader loader = resourceLoader.orElse(FileSystemResourceLoader.defaultLoader()); try { Optional<InputStream> inputStream = loader.getResourceAsStream(filePath); if (inputStream.isPresent()) { return Optional.of(propertySourceLoader.read(fileName, inputStream.get())); } else { if (LOG.isWarnEnabled()) { LOG.warn("Unable to load properties file: {}", fileName); } return Optional.empty();
<<<<<<< import io.micronaut.core.annotation.Nullable; ======= >>>>>>> import io.micronaut.core.annotation.Nullable;
<<<<<<< * @param element The element ======= * @param declaringType The declaring type * @param element The element >>>>>>> * @param declaringType The declaring type * @param element The element <<<<<<< /** * Annotate an existing annotation metadata object. * * @param annotationMetadata The annotation metadata * @param annotationValue The annotation value * @param <A2> The annotation type * @return The mutated metadata */ public <A2 extends Annotation> AnnotationMetadata annotate( AnnotationMetadata annotationMetadata, AnnotationValue<A2> annotationValue) { if (annotationMetadata instanceof DefaultAnnotationMetadata) { final Optional<T> annotationMirror = getAnnotationMirror(annotationValue.getAnnotationName()); final DefaultAnnotationMetadata defaultMetadata = (DefaultAnnotationMetadata) annotationMetadata; defaultMetadata.addDeclaredAnnotation( annotationValue.getAnnotationName(), annotationValue.getValues() ); annotationMirror.ifPresent(annotationType -> processAnnotationStereotypes( defaultMetadata, true, annotationType, annotationValue.getAnnotationName() ) ); } return annotationMetadata; } ======= /** * Key used to reference mutated metadata. * * @param <T> the element type */ private static class MetadataKey<T> { final String declaringName; final T element; MetadataKey(String declaringName, T element) { this.declaringName = declaringName; this.element = element; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } MetadataKey that = (MetadataKey) o; return declaringName.equals(that.declaringName) && element.equals(that.element); } @Override public int hashCode() { return Objects.hash(declaringName, element); } } >>>>>>> /** * Annotate an existing annotation metadata object. * * @param annotationMetadata The annotation metadata * @param annotationValue The annotation value * @param <A2> The annotation type * @return The mutated metadata */ public <A2 extends Annotation> AnnotationMetadata annotate( AnnotationMetadata annotationMetadata, AnnotationValue<A2> annotationValue) { if (annotationMetadata instanceof DefaultAnnotationMetadata) { final Optional<T> annotationMirror = getAnnotationMirror(annotationValue.getAnnotationName()); final DefaultAnnotationMetadata defaultMetadata = (DefaultAnnotationMetadata) annotationMetadata; defaultMetadata.addDeclaredAnnotation( annotationValue.getAnnotationName(), annotationValue.getValues() ); annotationMirror.ifPresent(annotationType -> processAnnotationStereotypes( defaultMetadata, true, annotationType, annotationValue.getAnnotationName() ) ); } return annotationMetadata; } /** * Key used to reference mutated metadata. * * @param <T> the element type */ private static class MetadataKey<T> { final String declaringName; final T element; MetadataKey(String declaringName, T element) { this.declaringName = declaringName; this.element = element; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } MetadataKey that = (MetadataKey) o; return declaringName.equals(that.declaringName) && element.equals(that.element); } @Override public int hashCode() { return Objects.hash(declaringName, element); } }
<<<<<<< List<UriRoute> routes = new ArrayList<>(2); MediaType[] consumes = method.getValue(Consumes.class, String[].class).map((types) -> Arrays.stream(types).map(MediaType::new).toArray(MediaType[]::new) ).orElse(null); MediaType[] produces = method.getValue(Produces.class, String[].class).map((types) -> Arrays.stream(types).map(MediaType::new).toArray(MediaType[]::new) ).orElse(null); ======= UriRoute route = null; MediaType[] consumes = Arrays.stream(method.stringValues(Consumes.class)).map(MediaType::new).toArray(MediaType[]::new); MediaType[] produces = Arrays.stream(method.stringValues(Produces.class)).map(MediaType::new).toArray(MediaType[]::new); >>>>>>> List<UriRoute> routes = new ArrayList<>(2); MediaType[] consumes = Arrays.stream(method.stringValues(Consumes.class)).map(MediaType::new).toArray(MediaType[]::new); MediaType[] produces = Arrays.stream(method.stringValues(Produces.class)).map(MediaType::new).toArray(MediaType[]::new);
<<<<<<< context.findAnnotation(Version.class) .flatMap(versionAnnotation -> versionAnnotation.getValue(String.class)) .filter(StringUtils::isNotEmpty) .ifPresent(version -> { ClientVersioningConfiguration configuration = getVersioningConfiguration(clientAnnotation); configuration.getHeaders() .forEach(header -> headers.put(header, version)); configuration.getParameters() .forEach(parameter -> queryParams.put(parameter, version)); }); ======= Map<String, Object> attributes = new LinkedHashMap<>(ATTRIBUTES_INITIAL_CAPACITY); List<AnnotationValue<RequestAttribute>> attributeAnnotations = context.getAnnotationValuesByType(RequestAttribute.class); for (AnnotationValue<RequestAttribute> attributeAnnotation : attributeAnnotations) { String attributeName = attributeAnnotation.get("name", String.class).orElse(null); Object attributeValue = attributeAnnotation.getValue(Object.class).orElse(null); if (StringUtils.isNotEmpty(attributeName) && attributeValue != null) { attributes.put(attributeName, attributeValue); } } >>>>>>> context.findAnnotation(Version.class) .flatMap(versionAnnotation -> versionAnnotation.getValue(String.class)) .filter(StringUtils::isNotEmpty) .ifPresent(version -> { ClientVersioningConfiguration configuration = getVersioningConfiguration(clientAnnotation); configuration.getHeaders() .forEach(header -> headers.put(header, version)); configuration.getParameters() .forEach(parameter -> queryParams.put(parameter, version)); }); Map<String, Object> attributes = new LinkedHashMap<>(ATTRIBUTES_INITIAL_CAPACITY); List<AnnotationValue<RequestAttribute>> attributeAnnotations = context.getAnnotationValuesByType(RequestAttribute.class); for (AnnotationValue<RequestAttribute> attributeAnnotation : attributeAnnotations) { String attributeName = attributeAnnotation.get("name", String.class).orElse(null); Object attributeValue = attributeAnnotation.getValue(Object.class).orElse(null); if (StringUtils.isNotEmpty(attributeName) && attributeValue != null) { attributes.put(attributeName, attributeValue); } }
<<<<<<<
   * Returns the permutation's strong name. This can be used to distinguish
   * between different permutations of the same module. In hosted mode, this
   * method will return {@value #HOSTED_MODE_PERMUTATION_STRONG_NAME}.
   */
  public static String getPermutationStrongName() {
    if (GWT.isScript()) {
      return Impl.getPermutationStrongName();
    } else {
      return HOSTED_MODE_PERMUTATION_STRONG_NAME;
    }
  }

  /**
   * @deprecated Use {@link Object#getClass()}, {@link Class#getName()}.
=======
   * @deprecated Use {@link Object#getClass()}, {@link Class#getName()}
>>>>>>>
   * Returns the permutation's strong name. This can be used to distinguish
   * between different permutations of the same module. In hosted mode, this
   * method will return {@value #HOSTED_MODE_PERMUTATION_STRONG_NAME}.
   */
  public static String getPermutationStrongName() {
    if (GWT.isScript()) {
      return Impl.getPermutationStrongName();
    } else {
      return HOSTED_MODE_PERMUTATION_STRONG_NAME;
    }
  }

  /**
   * @deprecated Use {@link Object#getClass()}, {@link Class#getName()}

<<<<<<<
   * Run the specified callback once the necessary code for it has been loaded.
   */
  public static void runAsync(RunAsyncCallback callback) {
    /*
     * By default, just call the callback. This allows using
     * <code>runAsync</code> in code that might or might not run in a web
     * browser.
     */
    UncaughtExceptionHandler handler = sUncaughtExceptionHandler;
    if (handler == null) {
      callback.onSuccess();
    } else {
      try {
        callback.onSuccess();
      } catch (Throwable e) {
        handler.onUncaughtException(e);
      }
    }
  }

  /**
=======
>>>>>>>
   * Run the specified callback once the necessary code for it has been loaded.
   */
  public static void runAsync(RunAsyncCallback callback) {
    /*
     * By default, just call the callback. This allows using
     * <code>runAsync</code> in code that might or might not run in a web
     * browser.
     */
    UncaughtExceptionHandler handler = sUncaughtExceptionHandler;
    if (handler == null) {
      callback.onSuccess();
    } else {
      try {
        callback.onSuccess();
      } catch (Throwable e) {
        handler.onUncaughtException(e);
      }
    }
  }

  /**
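The kept runAsync fallback simply invokes the callback, wrapping it in the registered uncaught-exception handler when one is set, so code-splitting call sites still behave sensibly outside a compiled script. A runnable sketch of that wrap-if-handler-present pattern, with the GWT types reduced to local interfaces:

public class FallbackRunAsync {
    interface RunAsyncCallback {
        void onSuccess();
    }

    interface UncaughtExceptionHandler {
        void onUncaughtException(Throwable e);
    }

    static UncaughtExceptionHandler handler;

    // Mirrors the kept fallback: no code-splitting, just run the callback,
    // routing any throwable to the registered handler when one exists.
    static void runAsync(RunAsyncCallback callback) {
        if (handler == null) {
            callback.onSuccess();
        } else {
            try {
                callback.onSuccess();
            } catch (Throwable e) {
                handler.onUncaughtException(e);
            }
        }
    }

    public static void main(String[] args) {
        handler = e -> System.out.println("caught: " + e.getMessage());
        runAsync(() -> { throw new IllegalStateException("boom"); });
    }
}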
<<<<<<<
  @SuppressWarnings("unchecked")
  public void addGeneratedCompilationUnits(TreeLogger logger,
      Set<? extends CompilationUnit> generatedCups) {
    for (CompilationUnit unit : generatedCups) {
      String typeName = unit.getTypeName();
      assert (!unitMap.containsKey(typeName));
      unitMap.put(typeName, unit);
=======
  @SuppressWarnings("unchecked")
  public void addGeneratedCompilationUnits(TreeLogger logger,
      Set<? extends CompilationUnit> generatedCups) {
    logger = logger.branch(TreeLogger.DEBUG, "Adding '" + generatedCups.size()
        + "' new generated units");
    for (CompilationUnit unit : generatedCups) {
      String typeName = unit.getTypeName();
      assert (!unitMap.containsKey(typeName));
      unitMap.put(typeName, unit);
>>>>>>>
  @SuppressWarnings("unchecked")
  public void addGeneratedCompilationUnits(TreeLogger logger,
      Set<? extends CompilationUnit> generatedCups) {
    logger = logger.branch(TreeLogger.DEBUG, "Adding '" + generatedCups.size()
        + "' new generated units");
    for (CompilationUnit unit : generatedCups) {
      String typeName = unit.getTypeName();
      assert (!unitMap.containsKey(typeName));
      unitMap.put(typeName, unit);

<<<<<<<
  public void refresh(TreeLogger logger) {
=======
  public void refresh(TreeLogger logger) {
    logger = logger.branch(TreeLogger.DEBUG, "Refreshing module from source");
>>>>>>>
  public void refresh(TreeLogger logger) {
    logger = logger.branch(TreeLogger.DEBUG, "Refreshing module from source");

<<<<<<<
    /*
     * Only retain state for units marked as CHECKED; because CHECKED units
     * won't be revalidated.
     */
    Set<CompilationUnit> toRetain = new HashSet<CompilationUnit>(exposedUnits);
    for (Iterator<CompilationUnit> it = toRetain.iterator(); it.hasNext();) {
      CompilationUnit unit = it.next();
      if (unit.getState() != State.CHECKED) {
        it.remove();
      }
    }
    invalidatorState.retainAll(toRetain);

    jdtCompiler = new JdtCompiler();
    compile(logger, getCompilationUnits());
    mediator.refresh(logger, getCompilationUnits());
    markSurvivorsChecked(getCompilationUnits());
  }

  /**
   * Compile units and update their internal state. Invalidate any units with
   * compile errors.
   */
  private void compile(TreeLogger logger, Set<CompilationUnit> newUnits) {
    PerfLogger.start("CompilationState.compile");
    if (jdtCompiler.doCompile(newUnits)) {
      // Dump all units with direct errors; we cannot safely check them.
      boolean anyErrors = CompilationUnitInvalidator.invalidateUnitsWithErrors(
          logger, newUnits);

      // Check all units using our custom checks.
      CompilationUnitInvalidator.validateCompilationUnits(invalidatorState,
          newUnits, jdtCompiler.getBinaryTypeNames());

      // More units may have errors now.
      anyErrors |= CompilationUnitInvalidator.invalidateUnitsWithErrors(logger,
          newUnits);

      if (anyErrors) {
        CompilationUnitInvalidator.invalidateUnitsWithInvalidRefs(logger,
            newUnits);
      }

      JsniCollector.collectJsniMethods(logger, newUnits, new JsProgram());
    }
    PerfLogger.end();
=======
    jdtCompiler = new JdtCompiler();
    compile(logger, getCompilationUnits());
    mediator.refresh(logger, getCompilationUnits());
    markSurvivorsChecked(getCompilationUnits());
  }

  /**
   * Compile units and update their internal state. Invalidate any units with
   * compile errors.
   */
  private void compile(TreeLogger logger, Set<CompilationUnit> newUnits) {
    PerfLogger.start("CompilationState.compile");
    if (jdtCompiler.doCompile(newUnits)) {
      logger = logger.branch(TreeLogger.DEBUG,
          "Validating newly compiled units");

      // Dump all units with direct errors; we cannot safely check them.
      boolean anyErrors = CompilationUnitInvalidator.invalidateUnitsWithErrors(
          logger, newUnits);

      // Check all units using our custom checks.
      CompilationUnitInvalidator.validateCompilationUnits(newUnits,
          jdtCompiler.getBinaryTypeNames());

      // More units may have errors now.
      anyErrors |= CompilationUnitInvalidator.invalidateUnitsWithErrors(logger,
          newUnits);

      if (anyErrors) {
        CompilationUnitInvalidator.invalidateUnitsWithInvalidRefs(logger,
            newUnits);
      }

      JsniCollector.collectJsniMethods(logger, newUnits, new JsProgram());
    }
    PerfLogger.end();
>>>>>>>
    /*
     * Only retain state for units marked as CHECKED; because CHECKED units
     * won't be revalidated.
     */
    Set<CompilationUnit> toRetain = new HashSet<CompilationUnit>(exposedUnits);
    for (Iterator<CompilationUnit> it = toRetain.iterator(); it.hasNext();) {
      CompilationUnit unit = it.next();
      if (unit.getState() != State.CHECKED) {
        it.remove();
      }
    }
    invalidatorState.retainAll(toRetain);

    jdtCompiler = new JdtCompiler();
    compile(logger, getCompilationUnits());
    mediator.refresh(logger, getCompilationUnits());
    markSurvivorsChecked(getCompilationUnits());
  }

  /**
   * Compile units and update their internal state. Invalidate any units with
   * compile errors.
   */
  private void compile(TreeLogger logger, Set<CompilationUnit> newUnits) {
    PerfLogger.start("CompilationState.compile");
    if (jdtCompiler.doCompile(newUnits)) {
      logger = logger.branch(TreeLogger.DEBUG,
          "Validating newly compiled units");

      // Dump all units with direct errors; we cannot safely check them.
      boolean anyErrors = CompilationUnitInvalidator.invalidateUnitsWithErrors(
          logger, newUnits);

      // Check all units using our custom checks.
      CompilationUnitInvalidator.validateCompilationUnits(invalidatorState,
          newUnits, jdtCompiler.getBinaryTypeNames());

      // More units may have errors now.
      anyErrors |= CompilationUnitInvalidator.invalidateUnitsWithErrors(logger,
          newUnits);

      if (anyErrors) {
        CompilationUnitInvalidator.invalidateUnitsWithInvalidRefs(logger,
            newUnits);
      }

      JsniCollector.collectJsniMethods(logger, newUnits, new JsProgram());
    }
    PerfLogger.end();
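The resolution restores one side's retain-only-CHECKED filtering in front of the other side's extra debug branching. The filter itself is a plain iterator-removal pass over a copied set. A standalone sketch of that pass, using an invented Unit record and State enum in place of the GWT types:

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

public class RetainChecked {
    enum State { FRESH, ERROR, CHECKED }

    record Unit(String name, State state) { }

    public static void main(String[] args) {
        Set<Unit> exposed = new HashSet<>(Set.of(
                new Unit("a.A", State.CHECKED),
                new Unit("b.B", State.FRESH),
                new Unit("c.C", State.ERROR)));

        // Mirrors the merged retain loop: drop every unit that is not CHECKED,
        // because only CHECKED units skip revalidation on refresh.
        Set<Unit> toRetain = new HashSet<>(exposed);
        for (Iterator<Unit> it = toRetain.iterator(); it.hasNext();) {
            if (it.next().state() != State.CHECKED) {
                it.remove();
            }
        }
        System.out.println(toRetain); // only a.A remains
    }
}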
<<<<<<<
    // Curator used to determine which node is coordinator
    private final CuratorFramework curatorClient;
    private final String nodesPathPrefix;
    private final String coordinatorPath;
    private final NiFiProperties nifiProperties;
=======
    private final LeaderElectionManager leaderElectionManager;
    private final AtomicLong latestUpdateId = new AtomicLong(-1);
>>>>>>>
    private final NiFiProperties nifiProperties;
    private final LeaderElectionManager leaderElectionManager;
    private final AtomicLong latestUpdateId = new AtomicLong(-1);

<<<<<<<
    public NodeClusterCoordinator(final ClusterCoordinationProtocolSenderListener senderListener, final EventReporter eventReporter,
        final ClusterNodeFirewall firewall, final RevisionManager revisionManager, final NiFiProperties nifiProperties) {
=======
    public NodeClusterCoordinator(final ClusterCoordinationProtocolSenderListener senderListener, final EventReporter eventReporter,
        final LeaderElectionManager leaderElectionManager, final ClusterNodeFirewall firewall, final RevisionManager revisionManager) {
>>>>>>>
    public NodeClusterCoordinator(final ClusterCoordinationProtocolSenderListener senderListener, final EventReporter eventReporter,
        final LeaderElectionManager leaderElectionManager, final ClusterNodeFirewall firewall, final RevisionManager revisionManager, final NiFiProperties nifiProperties) {

<<<<<<<
        this.nifiProperties = nifiProperties;

        final RetryPolicy retryPolicy = new RetryNTimes(10, 500);
        final ZooKeeperClientConfig zkConfig = ZooKeeperClientConfig.createConfig(nifiProperties);

        curatorClient = CuratorFrameworkFactory.newClient(zkConfig.getConnectString(),
            zkConfig.getSessionTimeoutMillis(), zkConfig.getConnectionTimeoutMillis(), retryPolicy);
        curatorClient.start();
        nodesPathPrefix = zkConfig.resolvePath("cluster/nodes");
        coordinatorPath = nodesPathPrefix + "/coordinator";
=======
        this.leaderElectionManager = leaderElectionManager;
>>>>>>>
        this.nifiProperties = nifiProperties;
        this.leaderElectionManager = leaderElectionManager;

<<<<<<<
=======
    @Override
    public List<NodeConnectionStatus> getConnectionStatuses() {
        return new ArrayList<>(nodeStatuses.values());
    }
>>>>>>>
    @Override
    public List<NodeConnectionStatus> getConnectionStatuses() {
        return new ArrayList<>(nodeStatuses.values());
    }

<<<<<<<
        return nodeStatuses.values().stream()
            .filter(status -> status.getRoles().contains(ClusterRoles.PRIMARY_NODE))
            .findFirst()
            .map(status -> status.getNodeIdentifier())
            .orElse(null);
=======
        final String primaryNodeAddress = leaderElectionManager.getLeader(ClusterRoles.PRIMARY_NODE);
        if (primaryNodeAddress == null) {
            return null;
        }

        return nodeStatuses.keySet().stream()
            .filter(nodeId -> primaryNodeAddress.equals(nodeId.getSocketAddress() + ":" + nodeId.getSocketPort()))
            .findFirst()
            .orElse(null);
>>>>>>>
        final String primaryNodeAddress = leaderElectionManager.getLeader(ClusterRoles.PRIMARY_NODE);
        if (primaryNodeAddress == null) {
            return null;
        }

        return nodeStatuses.keySet().stream()
            .filter(nodeId -> primaryNodeAddress.equals(nodeId.getSocketAddress() + ":" + nodeId.getSocketPort()))
            .findFirst()
            .orElse(null);

<<<<<<<
        return new ConnectionResponse(resolvedNodeIdentifier, dataFlow, instanceId, new ArrayList<>(nodeStatuses.values()),
            revisionManager.getAllRevisions().stream().map(rev -> ComponentRevision.fromRevision(rev)).collect(Collectors.toList()));
=======
        return new ConnectionResponse(resolvedNodeIdentifier, dataFlow, instanceId, getConnectionStatuses(),
            revisionManager.getAllRevisions().stream().map(rev -> ComponentRevision.fromRevision(rev)).collect(Collectors.toList()));
>>>>>>>
        return new ConnectionResponse(resolvedNodeIdentifier, dataFlow, instanceId, getConnectionStatuses(),
            revisionManager.getAllRevisions().stream().map(rev -> ComponentRevision.fromRevision(rev)).collect(Collectors.toList()));
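The merged primary-node lookup asks the LeaderElectionManager for the leader's "host:port" string and then scans the known node identifiers for the one whose socket address matches. A minimal sketch of that matching step; NodeId and the sample addresses are illustrative stand-ins:

import java.util.Map;
import java.util.Optional;

public class PrimaryNodeLookup {
    record NodeId(String socketAddress, int socketPort) { }

    // Mirrors the merged lookup: the election manager reports the leader as
    // "host:port", and we scan known node IDs for the matching address.
    static Optional<NodeId> findPrimary(String leaderAddress, Map<NodeId, String> statuses) {
        if (leaderAddress == null) {
            return Optional.empty();
        }
        return statuses.keySet().stream()
                .filter(id -> leaderAddress.equals(id.socketAddress() + ":" + id.socketPort()))
                .findFirst();
    }

    public static void main(String[] args) {
        Map<NodeId, String> statuses = Map.of(
                new NodeId("node1.example", 8080), "CONNECTED",
                new NodeId("node2.example", 8080), "CONNECTED");
        System.out.println(findPrimary("node2.example:8080", statuses)); // matches node2
    }
}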
<<<<<<<
import static org.opencastproject.index.service.util.CatalogAdapterUtil.getCatalogProperties;
import static org.opencastproject.pm.api.Person.person;
import static org.opencastproject.security.api.SecurityConstants.GLOBAL_ADMIN_ROLE;
=======
>>>>>>>
import static org.opencastproject.index.service.util.CatalogAdapterUtil.getCatalogProperties;

<<<<<<<
=======
import org.opencastproject.security.urlsigning.service.UrlSigningService;
import org.opencastproject.security.urlsigning.utils.UrlSigningServiceOsgiUtil;
import org.opencastproject.series.api.SeriesService;
>>>>>>>
import org.opencastproject.security.urlsigning.service.UrlSigningService;
import org.opencastproject.security.urlsigning.utils.UrlSigningServiceOsgiUtil;
import org.opencastproject.series.api.SeriesService;

<<<<<<<
    ParticipationManagementDatabase pmDatabase = EasyMock.createNiceMock(ParticipationManagementDatabase.class);
    EasyMock.expect(pmDatabase.getRecordingByEvent(EasyMock.anyLong())).andReturn(
            createRecording(1, "A", "Test title A"));
    EasyMock.expect(pmDatabase.getRecordingByEvent(EasyMock.anyLong())).andReturn(
            createRecording(2, "B", "Test title B"));
    EasyMock.expect(pmDatabase.getRecordingByEvent(EasyMock.anyLong())).andReturn(
            createRecording(3, "C", "Test title C"));
    EasyMock.expect(pmDatabase.getMessagesByRecordingId(EasyMock.anyLong(), EasyMock.anyObject(Option.class)))
            .andReturn(Arrays.asList(createMessage(1, "template1", "Titel 1", "Body 1")));
    EasyMock.expect(pmDatabase.getMessagesByRecordingId(EasyMock.anyLong(), EasyMock.anyObject(Option.class)))
            .andReturn(Arrays.asList(createMessage(2, "template2", "Titel 2", "Body 2"),
                    createMessage(3, "template3", "Titel 3", "Body 3")));
    EasyMock.expect(pmDatabase.getMessagesByRecordingId(EasyMock.anyLong(), EasyMock.anyObject(Option.class)))
            .andReturn(Arrays.asList(createMessage(4, "template4", "Titel 4", "Body 4")));
    EasyMock.replay(pmDatabase);
    env.setParticipationManagementDatabase(pmDatabase);
=======
    DublinCoreCatalogService dublinCoreCatalogService = EasyMock.createNiceMock(DublinCoreCatalogService.class);
    env.setDublinCoreCatalogService(dublinCoreCatalogService);
>>>>>>>
    DublinCoreCatalogService dublinCoreCatalogService = EasyMock.createNiceMock(DublinCoreCatalogService.class);
    env.setDublinCoreCatalogService(dublinCoreCatalogService);

<<<<<<<
  public ParticipationManagementDatabase getPMPersistence() {
    return env.getPmPersistence();
  }

  @Override
=======
  public SeriesService getSeriesService() {
    return env.getSeriesService();
  }

  @Override
  public DublinCoreCatalogService getDublinCoreService() {
    return env.getDublinCoreService();
  }

  @Override
>>>>>>>
  public SeriesService getSeriesService() {
    return env.getSeriesService();
  }

  @Override
  public DublinCoreCatalogService getDublinCoreService() {
    return env.getDublinCoreService();
  }

  @Override
<<<<<<< registerParser("ZDA", ZDAParser.class); registerParser("MDA", MDAParser.class); ======= registerParser("ZDA", ZDAParser.class); registerParser("MWD", MWDParser.class); >>>>>>> registerParser("ZDA", ZDAParser.class); registerParser("MDA", MDAParser.class); registerParser("MWD", MWDParser.class);
<<<<<<<
                config.getString(Key.OVERLAY_STRING_FONT, "Helvetica"));
=======
                config.getString(FONT_CONFIG_KEY, "Arial"));
>>>>>>>
                config.getString(Key.OVERLAY_STRING_FONT, "Arial"));
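The merged call takes the typed Key constant from one branch and the updated "Arial" default from the other; the default only matters when the key is absent from configuration. A tiny sketch of that lookup-with-fallback shape, with an invented Key enum and config map:

import java.util.Map;

public class ConfigDefaults {
    enum Key { OVERLAY_STRING_FONT }

    static final Map<String, String> CONFIG = Map.of(); // empty: forces the fallback

    // Mirrors the merged call shape: typed key from one branch,
    // updated default ("Arial") from the other.
    static String getString(Key key, String defaultValue) {
        return CONFIG.getOrDefault(key.name(), defaultValue);
    }

    public static void main(String[] args) {
        System.out.println(getString(Key.OVERLAY_STRING_FONT, "Arial")); // Arial
    }
}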
<<<<<<<
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
=======
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;

import java.io.IOException;
>>>>>>>
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

<<<<<<<
import static org.junit.jupiter.api.Assertions.*;
=======
import static org.junit.Assert.*;
>>>>>>>
import static org.junit.jupiter.api.Assertions.*;

<<<<<<<
    private void initializeHTTPS() {
=======
    private void initializeHTTPSWithJKSKeyStoreWithPassword() throws IOException {
>>>>>>>
    private void initializeHTTPSWithJKSKeyStoreWithPassword() {
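This hunk is a JUnit 4-to-5 migration: @BeforeClass becomes @BeforeAll, @Before/@After become @BeforeEach/@AfterEach, @Ignore becomes @Disabled, and the Assert class gives way to Assertions. A skeleton test class matching the merged imports; the class and method names are illustrative only:

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertTrue;

class MigratedLifecycleTest {
    @BeforeAll                  // replaces JUnit 4's @BeforeClass
    static void setUpClass() { }

    @BeforeEach                 // replaces @Before
    void setUp() { }

    @AfterEach                  // replaces @After
    void tearDown() { }

    @Test
    void passes() {
        assertTrue(true);
    }

    @Disabled("replaces JUnit 4's @Ignore")
    @Test
    void skipped() { }
}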
<<<<<<<
            final Rectangle region = crop.getRectangle(
                    fullSize, opList.getScaleConstraint());
            final double x = region.x() / fullSize.width();
            final double y = region.y() / fullSize.height();
            final double width = region.width() / fullSize.width();
            final double height = region.height() / fullSize.height();

            command.add("-region");
            command.add(String.format("{%s,%s},{%s,%s}",
                    yDecFormat.format(y), xDecFormat.format(x),
                    yDecFormat.format(height), xDecFormat.format(width)));
=======
            if (!crop.isFull()) {
                final NumberFormat xFormat = NumberFormat.getInstance(Locale.US);
                xFormat.setRoundingMode(RoundingMode.DOWN);

                // This will always be true for Locale.US. No need to check
                // if it isn't since the kdu_expand invocation will make
                // that obvious.
                if (xFormat instanceof DecimalFormat) {
                    // Truncate coordinates to (num digits) + 1 decimal
                    // places to prevent kdu_expand from returning an extra
                    // pixel of width/height.
                    // N.B.: this broke sometime between KDU v7.6 and
                    // v7.10.4, and kdu_expand now unpredictably returns an
                    // extra pixel. Too bad, but Java2DUtil.crop() will
                    // take care of it.
                    final int xDecimalPlaces =
                            Integer.toString(imageSize.width).length() + 1;
                    String xPattern = "#." + StringUtils.repeat("#", xDecimalPlaces);
                    ((DecimalFormat) xFormat).applyPattern(xPattern);
                }

                final NumberFormat yFormat = NumberFormat.getInstance(Locale.US);
                yFormat.setRoundingMode(RoundingMode.DOWN);

                if (yFormat instanceof DecimalFormat) {
                    final int yDecimalPlaces =
                            Integer.toString(imageSize.height).length() + 1;
                    String yPattern = "#." + StringUtils.repeat("#", yDecimalPlaces);
                    ((DecimalFormat) yFormat).applyPattern(yPattern);
                }

                double x, y, width, height; // 0-1
                if (Crop.Shape.SQUARE.equals(crop.getShape())) {
                    final int shortestSide =
                            Math.min(imageSize.width, imageSize.height);
                    x = (imageSize.width - shortestSide) / (double) imageSize.width / 2f;
                    y = (imageSize.height - shortestSide) / (double) imageSize.height / 2f;
                    width = shortestSide / (double) imageSize.width;
                    height = shortestSide / (double) imageSize.height;
                } else {
                    x = crop.getX();
                    y = crop.getY();
                    width = crop.getWidth();
                    height = crop.getHeight();
                    if (Crop.Unit.PIXELS.equals(crop.getUnit())) {
                        x /= imageSize.width;
                        y /= imageSize.height;
                        width /= imageSize.width;
                        height /= imageSize.height;
                    }
                }

                command.add("-region");
                command.add(String.format("{%s,%s},{%s,%s}",
                        yFormat.format(y), xFormat.format(x),
                        yFormat.format(height), xFormat.format(width)));
            }
>>>>>>>
            final Rectangle region = crop.getRectangle(
                    fullSize, opList.getScaleConstraint());
            final double x = region.x() / fullSize.width();
            final double y = region.y() / fullSize.height();
            final double width = region.width() / fullSize.width();
            final double height = region.height() / fullSize.height();

            command.add("-region");
            command.add(String.format("{%s,%s},{%s,%s}",
                    yFormat.format(y), xFormat.format(x),
                    yFormat.format(height), xFormat.format(width)));
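The kept resolution normalizes the crop rectangle to 0-1 fractions of the full image size and hands the values to kdu_expand as {top,left},{height,width}, truncating rather than rounding so the region never grows past the image. A self-contained sketch of that arithmetic with invented image and crop dimensions:

import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;

public class KduRegionArgument {
    public static void main(String[] args) {
        // Hypothetical 4000x3000 source image and a 1000x500 crop at (200, 300).
        double fullWidth = 4000, fullHeight = 3000;
        double cropX = 200, cropY = 300, cropWidth = 1000, cropHeight = 500;

        // Mirrors the merged math: express the crop as fractions of the full size.
        double x = cropX / fullWidth;
        double y = cropY / fullHeight;
        double width = cropWidth / fullWidth;
        double height = cropHeight / fullHeight;

        // Truncate (round down) so the requested region never exceeds the image.
        DecimalFormat fmt = new DecimalFormat("#.######",
                DecimalFormatSymbols.getInstance(Locale.US));
        fmt.setRoundingMode(RoundingMode.DOWN);

        // kdu_expand's -region syntax is {top,left},{height,width}.
        System.out.printf("-region {%s,%s},{%s,%s}%n",
                fmt.format(y), fmt.format(x), fmt.format(height), fmt.format(width));
    }
}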
<<<<<<<
    @Test
    void testConstructorWithZeroLengthValueArgument() {
        assertThrows(IllegalArgumentException.class,
                () -> new Header("name", ""));
=======
    @Test
    public void testConstructorWithZeroLengthValueArgument() {
        instance = new Header("name", "");
        assertEquals("", instance.getValue());
>>>>>>>
    @Test
    void testConstructorWithZeroLengthValueArgument() {
        instance = new Header("name", "");
        assertEquals("", instance.getValue());
<<<<<<<
                getPublicRootRef(request.getRootRef(), request.getHeaders()) +
                RestletApplication.IIIF_2_PATH + "/" +
=======
                getPublicRootReference() +
                WebApplication.IIIF_2_PATH + "/" +
>>>>>>>
                getPublicRootReference() +
                RestletApplication.IIIF_2_PATH + "/" +

<<<<<<<
    private String getImageURI() {
        final Series<Header> requestHeaders = getRequest().getHeaders();
        return getPublicRootRef(getRequest().getRootRef(), requestHeaders) +
                RestletApplication.IIIF_2_PATH + "/" +
=======
    private String getImageUri() {
        return getPublicRootReference() +
                WebApplication.IIIF_2_PATH + "/" +
>>>>>>>
    private String getImageURI() {
        return getPublicRootReference() +
                RestletApplication.IIIF_2_PATH + "/" +
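Both resolved methods now build the image URI from getPublicRootReference() plus the RestletApplication path constant. A trivial sketch of the concatenation; the host value and helper are placeholders, not the application's real configuration:

public class ImageUriAssembly {
    // Stand-ins for the merged constants/method; values are illustrative.
    static final String IIIF_2_PATH = "/iiif/2";

    static String getPublicRootReference() {
        return "https://example.org";
    }

    // Mirrors the merged concatenation: public root + IIIF 2 path + identifier.
    static String imageUri(String identifier) {
        return getPublicRootReference() + IIIF_2_PATH + "/" + identifier;
    }

    public static void main(String[] args) {
        System.out.println(imageUri("my-image.tif")); // https://example.org/iiif/2/my-image.tif
    }
}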
<<<<<<<
=======
        rep.getObjectWriter().
                without(SerializationFeature.WRITE_NULL_MAP_VALUES).
                without(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS);
        rep.setCharacterSet(CharacterSet.UTF_8);

        // Add client cache header(s) if configured to do so. We do this later
        // rather than sooner to prevent them from being sent along with an
        // error response.
        getResponseCacheDirectives().addAll(getCacheDirectives());
>>>>>>>