conflict_resolution
<<<<<<<
@ContextConfiguration(classes = JavaMessageListenerContainerAwsTest.MessageListenerContainerAwsTestConfiguration.class)
class JavaMessageListenerContainerAwsTest extends MessageListenerContainerAwsTest {
=======
@ContextConfiguration(
classes = JavaMessageListenerContainerAwsTest.MessageListenerContainerAwsTestConfiguration.class)
public class JavaMessageListenerContainerAwsTest extends MessageListenerContainerAwsTest {
>>>>>>>
@ContextConfiguration(
classes = JavaMessageListenerContainerAwsTest.MessageListenerContainerAwsTestConfiguration.class)
class JavaMessageListenerContainerAwsTest extends MessageListenerContainerAwsTest { |
<<<<<<<
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
=======
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
>>>>>>>
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
<<<<<<<
Assertions.assertEquals("value1", this.stackTag1);
Assertions.assertEquals("value2", this.stackTag2);
=======
assertThat(this.stackTag1).isEqualTo("value1");
assertThat(this.stackTag2).isEqualTo("value2");
>>>>>>>
assertThat(this.stackTag1).isEqualTo("value1");
assertThat(this.stackTag2).isEqualTo("value2"); |
<<<<<<<
=======
import javax.ws.rs.ApplicationPath;
>>>>>>>
import javax.ws.rs.ApplicationPath;
<<<<<<<
final IPWhitelistFilter iPWhitelistFilter = new IPWhitelistFilter();
final RawTransactionResource rawTransactionResource = new RawTransactionResource(transactionManager);
=======
final RawTransactionResource rawTransactionResource = new RawTransactionResource();
>>>>>>>
final RawTransactionResource rawTransactionResource = new RawTransactionResource(transactionManager); |
<<<<<<<
@Override
public String workdir() { return "qdata"; };
@Override
public String socket() { return "/tmp/tst1.ipc"; };
=======
@Override
public List<String> generatekeys() {
return emptyList();
}
>>>>>>>
@Override
public List<String> generatekeys() {
return emptyList();
}
@Override
public String workdir() { return "qdata"; };
@Override
public String socket() { return "/tmp/tst1.ipc"; }; |
<<<<<<<
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ObjectMetadata;
import org.elasticspring.support.TestStackEnvironment;
import org.junit.After;
=======
import org.elasticspring.core.region.S3Region;
>>>>>>>
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ObjectMetadata;
import org.elasticspring.support.TestStackEnvironment;
import org.junit.After;
import org.elasticspring.core.region.S3Region;
<<<<<<<
//Cleans up the bucket. Because if the bucket is not cleaned up, then the bucket will not be deleted after the test run.
@After
public void tearDown() throws Exception {
String bucketName = this.testStackEnvironment.getByLogicalId("EmptyBucket");
for (String createdObject : this.createdObjects) {
this.amazonS3.deleteObject(bucketName, createdObject);
}
=======
@Test
@IfProfileValue(name = "test-groups", value = "aws-test")
public void testBucketNamesWithDotsOnAllS3Regions() throws IOException {
for (S3Region region : S3Region.values()) {
InputStream inputStream = null;
try {
String bucketNameWithDots = region.getLocation() + ".elasticspring.org";
Resource resource = this.resourceLoader.getResource(S3_PREFIX + bucketNameWithDots + "/test.txt");
inputStream = resource.getInputStream();
Assert.assertTrue(resource.contentLength() > 0);
Assert.assertNotNull(inputStream);
} finally {
if (inputStream != null) {
inputStream.close();
}
}
}
}
@Test
@IfProfileValue(name = "test-groups", value = "aws-test")
public void testBucketNamesWithoutDotsOnAllS3Regions() throws IOException {
for (S3Region region : S3Region.values()) {
InputStream inputStream = null;
try {
String bucketNameWithoutDots = region.getLocation() + "-elasticspring-org";
Resource resource = this.resourceLoader.getResource(S3_PREFIX + bucketNameWithoutDots + "/test.txt");
inputStream = resource.getInputStream();
Assert.assertTrue(resource.contentLength() > 0);
Assert.assertNotNull(inputStream);
} finally {
if (inputStream != null) {
inputStream.close();
}
}
}
>>>>>>>
@Test
@IfProfileValue(name = "test-groups", value = "aws-test")
public void testBucketNamesWithDotsOnAllS3Regions() throws IOException {
for (S3Region region : S3Region.values()) {
InputStream inputStream = null;
try {
String bucketNameWithDots = region.getLocation() + ".elasticspring.org";
Resource resource = this.resourceLoader.getResource(S3_PREFIX + bucketNameWithDots + "/test.txt");
inputStream = resource.getInputStream();
Assert.assertTrue(resource.contentLength() > 0);
Assert.assertNotNull(inputStream);
} finally {
if (inputStream != null) {
inputStream.close();
}
}
}
}
@Test
@IfProfileValue(name = "test-groups", value = "aws-test")
public void testBucketNamesWithoutDotsOnAllS3Regions() throws IOException {
for (S3Region region : S3Region.values()) {
InputStream inputStream = null;
try {
String bucketNameWithoutDots = region.getLocation() + "-elasticspring-org";
Resource resource = this.resourceLoader.getResource(S3_PREFIX + bucketNameWithoutDots + "/test.txt");
inputStream = resource.getInputStream();
Assert.assertTrue(resource.contentLength() > 0);
Assert.assertNotNull(inputStream);
} finally {
if (inputStream != null) {
inputStream.close();
}
}
}
//Cleans up the bucket. Because if the bucket is not cleaned up, then the bucket will not be deleted after the test run.
@After
public void tearDown() throws Exception {
String bucketName = this.testStackEnvironment.getByLogicalId("EmptyBucket");
for (String createdObject : this.createdObjects) {
this.amazonS3.deleteObject(bucketName, createdObject);
} |
<<<<<<<
classes = BootNotificationMessagingTemplateIntegrationTest.NotificationMessagingTemplateIntegrationTestConfiguration.class)
class BootNotificationMessagingTemplateIntegrationTest
=======
classes = BootNotificationMessagingTemplateIntegrationTest.NotificationMessagingTemplateIntegrationTestConfiguration.class,
properties = {
"cloud.aws.credentials.access-key=${aws-integration-tests.accessKey}",
"cloud.aws.credentials.secret-key=${aws-integration-tests.secretKey}" })
public class BootNotificationMessagingTemplateIntegrationTest
>>>>>>>
classes = BootNotificationMessagingTemplateIntegrationTest.NotificationMessagingTemplateIntegrationTestConfiguration.class,
properties = {
"cloud.aws.credentials.access-key=${aws-integration-tests.accessKey}",
"cloud.aws.credentials.secret-key=${aws-integration-tests.secretKey}" })
class BootNotificationMessagingTemplateIntegrationTest |
<<<<<<<
registerBeanDefinitionParser("cache-manager", new CacheBeanDefinitionParser());
=======
registerBeanDefinitionParser("context-region", new ContextRegionBeanDefinitionParser());
>>>>>>>
registerBeanDefinitionParser("cache-manager", new CacheBeanDefinitionParser());
registerBeanDefinitionParser("context-region", new ContextRegionBeanDefinitionParser()); |
<<<<<<<
@SpringBootTest(classes = BootQueueMessagingTemplateIntegrationTest.QueueMessagingTemplateIntegrationTestConfiguration.class)
class BootQueueMessagingTemplateIntegrationTest
=======
@SpringBootTest(
classes = BootQueueMessagingTemplateIntegrationTest.QueueMessagingTemplateIntegrationTestConfiguration.class)
public class BootQueueMessagingTemplateIntegrationTest
>>>>>>>
@SpringBootTest(
classes = BootQueueMessagingTemplateIntegrationTest.QueueMessagingTemplateIntegrationTestConfiguration.class)
class BootQueueMessagingTemplateIntegrationTest |
<<<<<<<
private int mAllDayEventHeight = 100;
=======
private int mAllDayEventHeight= 100;
private int mScrollDuration = 250;
>>>>>>>
private int mAllDayEventHeight = 100;
private int mScrollDuration = 250;
<<<<<<<
mAllDayEventHeight = a.getDimensionPixelSize(R.styleable.WeekView_allDayEventHeight, mAllDayEventHeight);
=======
mAllDayEventHeight = a.getInt(R.styleable.WeekView_allDayEventHeight, mAllDayEventHeight);
mScrollDuration = a.getInt(R.styleable.WeekView_scrollDuration, mScrollDuration);
>>>>>>>
mAllDayEventHeight = a.getDimensionPixelSize(R.styleable.WeekView_allDayEventHeight, mAllDayEventHeight);
mScrollDuration = a.getInt(R.styleable.WeekView_scrollDuration, mScrollDuration); |
<<<<<<<
double scrollToHour = mScrollToHour;
=======
mEffectiveMinHourHeight= Math.max(mMinHourHeight, (int) ((getHeight() - mHeaderTextHeight - mHeaderRowPadding * 2 - mHeaderMarginBottom) / 24));
>>>>>>>
mEffectiveMinHourHeight= Math.max(mMinHourHeight, (int) ((getHeight() - mHeaderTextHeight - mHeaderRowPadding * 2 - mHeaderMarginBottom) / 24));
double scrollToHour = mScrollToHour; |
<<<<<<<
import android.R.integer;
=======
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
>>>>>>>
import android.R.integer;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
<<<<<<<
if(mDimmerView == null || mDialogHolder == null) {
throw new NullPointerException("Calling showDialog(View) before onFinishInflate() was called.");
}
mDimmerView
.animate()
.alpha(DIMMED_ALPHA_VALUE)
.setDuration(mTransitionAnimationDurationMs)
.withEndAction(new Runnable() {
@Override
public void run() {
mDimmerView.setClickable(true);
}
});
=======
ObjectAnimator animator = ObjectAnimator.ofFloat(mDimmerView, ALPHA, DIMMED_ALPHA_VALUE);
animator.setDuration(mTransitionAnimationDurationMs);
animator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(final Animator animation) {
mDimmerView.setClickable(true);
}
});
animator.start();
>>>>>>>
if (mDimmerView == null || mDialogHolder == null) {
throw new NullPointerException("Calling showDialog(View) before onFinishInflate() was called.");
}
ObjectAnimator animator = ObjectAnimator.ofFloat(mDimmerView, ALPHA, DIMMED_ALPHA_VALUE);
animator.setDuration(mTransitionAnimationDurationMs);
animator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(final Animator animation) {
mDimmerView.setClickable(true);
}
});
animator.start();
<<<<<<<
if(mDimmerView == null || mDialogHolder == null) {
throw new NullPointerException("Calling dismissDialog(View) before onFinishInflate() was called.");
}
dialogView.animate()
.alpha(0f)
.scaleX(0f)
.scaleY(0f)
.setDuration(mTransitionAnimationDurationMs)
.withEndAction(new Runnable() {
@Override
public void run() {
mDialogHolder.removeView(dialogView);
if (mDialogHolder.getChildCount() == 0) {
mDimmerView
.animate()
.alpha(0f);
mDimmerView.setClickable(false);
}
}
});
=======
ObjectAnimator alphaAnimator = ObjectAnimator.ofFloat(dialogView, ALPHA, 0f);
ObjectAnimator scaleXAnimator = ObjectAnimator.ofFloat(dialogView, SCALE_X, 0f);
ObjectAnimator scaleYAnimator = ObjectAnimator.ofFloat(dialogView, SCALE_Y, 0f);
AnimatorSet animatorSet = new AnimatorSet();
animatorSet.playTogether(alphaAnimator, scaleXAnimator, scaleYAnimator);
animatorSet.setDuration(mTransitionAnimationDurationMs);
animatorSet.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(final Animator animation) {
mDialogHolder.removeView(dialogView);
if (mDialogHolder.getChildCount() == 0) {
mDimmerView
.animate()
.alpha(0f);
mDimmerView.setClickable(false);
}
}
});
animatorSet.start();
>>>>>>>
if (mDimmerView == null || mDialogHolder == null) {
throw new NullPointerException("Calling dismissDialog(View) before onFinishInflate() was called.");
}
ObjectAnimator alphaAnimator = ObjectAnimator.ofFloat(dialogView, ALPHA, 0f);
ObjectAnimator scaleXAnimator = ObjectAnimator.ofFloat(dialogView, SCALE_X, 0f);
ObjectAnimator scaleYAnimator = ObjectAnimator.ofFloat(dialogView, SCALE_Y, 0f);
AnimatorSet animatorSet = new AnimatorSet();
animatorSet.playTogether(alphaAnimator, scaleXAnimator, scaleYAnimator);
animatorSet.setDuration(mTransitionAnimationDurationMs);
animatorSet.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(final Animator animation) {
mDialogHolder.removeView(dialogView);
if (mDialogHolder.getChildCount() == 0) {
mDimmerView
.animate()
.alpha(0f);
mDimmerView.setClickable(false);
}
}
});
animatorSet.start(); |
<<<<<<<
@StreamMessageConverter
public AbstractMessageConverter busJsonConverter() {
return new BusJacksonMessageConverter();
=======
public BusJacksonMessageConverter busJsonConverter(@Autowired(required = false) ObjectMapper objectMapper) {
return new BusJacksonMessageConverter(objectMapper);
>>>>>>>
@StreamMessageConverter
public AbstractMessageConverter busJsonConverter(@Autowired(required = false) ObjectMapper objectMapper) {
return new BusJacksonMessageConverter(objectMapper); |
<<<<<<<
@Configuration
@ConditionalOnClass({ Endpoint.class, RefreshScope.class })
protected static class BusRefreshConfiguration {
@Configuration
@ConditionalOnBean(ContextRefresher.class)
protected static class BusRefreshEndpointConfiguration {
@Bean
@ConditionalOnEnabledEndpoint
public RefreshBusEndpoint refreshBusEndpoint(ApplicationContext context,
BusProperties bus) {
return new RefreshBusEndpoint(context, bus.getId());
}
}
}
=======
>>>>>>> |
<<<<<<<
key = k;
project = p;
lock = new Object();
=======
>>>>>>>
key = k;
project = p; |
<<<<<<<
// null paths since we won't actually be reading them
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null);
final KeyConfiguration keyConfig =
new KeyConfiguration(null, emptyList(), singletonList(keypair), null, null, null);
=======
// null paths since we won't actually be reading them
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(null, emptyList(), singletonList(keypair), null, null);
>>>>>>>
// null paths since we won't actually be reading them
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(null, emptyList(), singletonList(keypair), null, null, null);
<<<<<<<
// null paths since we won't actually be reading them
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(null, null, singletonList(keypair), null, null, null);
=======
// null paths since we won't actually be reading them
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(null, null, singletonList(keypair), null, null);
>>>>>>>
// null paths since we won't actually be reading them
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(null, null, singletonList(keypair), null, null, null);
<<<<<<<
// null paths since we won't actually be reading them
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null);
final KeyConfiguration keyConfig =
new KeyConfiguration(
null, singletonList("passwordsAssignedToKeys"), singletonList(keypair), null, null, null);
=======
// null paths since we won't actually be reading them
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null, null);
final KeyConfiguration keyConfig =
new KeyConfiguration(
null, singletonList("passwordsAssignedToKeys"), singletonList(keypair), null, null);
>>>>>>>
// null paths since we won't actually be reading them
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(null, singletonList("passwordsAssignedToKeys"), singletonList(keypair), null, null, null);
<<<<<<<
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(passes, null, singletonList(keypair), null, null, null);
=======
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(passes, null, singletonList(keypair), null, null);
>>>>>>>
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(passes, null, singletonList(keypair), null, null, null);
<<<<<<<
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(passes, null, singletonList(keypair), null, null, null);
=======
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(passes, null, singletonList(keypair), null, null);
>>>>>>>
final ConfigKeyPair keypair = new FilesystemKeyPair(null, null, null);
final KeyConfiguration keyConfig = new KeyConfiguration(passes, null, singletonList(keypair), null, null, null);
<<<<<<<
final KeyDataConfig privKeyDataConfig =
new KeyDataConfig(
new PrivateKeyData(
"Wl+xSyXVuuqzpvznOS7dOobhcn4C5auxkFRi7yLtgtA=",
"yb7M8aRJzgxoJM2NecAPcmSVWDW1tRjv",
"MIqkFlgR2BWEpx2U0rObGg==",
"Gtvp1t6XZEiFVyaE/LHiP1+yvOIBBoiOL+bKeqcKgpiNt4j1oDDoqCC47UJpmQRC",
new ArgonOptions("i", 10, 1048576, 4)),
PrivateKeyType.LOCKED);
=======
final KeyDataConfig privKeyDataConfig = mock(KeyDataConfig.class);
when(privKeyDataConfig.getType()).thenReturn(PrivateKeyType.LOCKED);
PrivateKeyData privateKeyData = mock(PrivateKeyData.class);
when(privKeyDataConfig.getPrivateKeyData()).thenReturn(privateKeyData);
>>>>>>>
final KeyDataConfig privKeyDataConfig =
mock(KeyDataConfig.class);
when(privKeyDataConfig.getType()).thenReturn(
PrivateKeyType.LOCKED);
PrivateKeyData privateKeyData = mock(PrivateKeyData.class);
when(privKeyDataConfig.getPrivateKeyData()).thenReturn(privateKeyData);
<<<<<<<
final KeyDataConfig privKeyDataConfig =
new KeyDataConfig(
new PrivateKeyData(
"Wl+xSyXVuuqzpvznOS7dOobhcn4C5auxkFRi7yLtgtA=",
"yb7M8aRJzgxoJM2NecAPcmSVWDW1tRjv",
"MIqkFlgR2BWEpx2U0rObGg==",
"Gtvp1t6XZEiFVyaE/LHiP1+yvOIBBoiOL+bKeqcKgpiNt4j1oDDoqCC47UJpmQRC",
new ArgonOptions("i", 10, 1048576, 4)),
PrivateKeyType.LOCKED);
final InlineKeypair keyPair = new InlineKeypair("public", privKeyDataConfig);
=======
final KeyDataConfig privKeyDataConfig =
new KeyDataConfig(
new PrivateKeyData(
"Wl+xSyXVuuqzpvznOS7dOobhcn4C5auxkFRi7yLtgtA=",
"yb7M8aRJzgxoJM2NecAPcmSVWDW1tRjv",
"MIqkFlgR2BWEpx2U0rObGg==",
"Gtvp1t6XZEiFVyaE/LHiP1+yvOIBBoiOL+bKeqcKgpiNt4j1oDDoqCC47UJpmQRC",
new ArgonOptions("i", 10, 1048576, 4)),
PrivateKeyType.LOCKED);
KeyEncryptor keyEncryptor = mock(KeyEncryptor.class);
final InlineKeypair keyPair = new InlineKeypair("public", privKeyDataConfig, keyEncryptor);
>>>>>>>
final KeyDataConfig privKeyDataConfig =
new KeyDataConfig(
new PrivateKeyData(
"Wl+xSyXVuuqzpvznOS7dOobhcn4C5auxkFRi7yLtgtA=",
"yb7M8aRJzgxoJM2NecAPcmSVWDW1tRjv",
"MIqkFlgR2BWEpx2U0rObGg==",
"Gtvp1t6XZEiFVyaE/LHiP1+yvOIBBoiOL+bKeqcKgpiNt4j1oDDoqCC47UJpmQRC",
new ArgonOptions("i", 10, 1048576, 4)),
PrivateKeyType.LOCKED);
KeyEncryptor keyEncryptor = mock(KeyEncryptor.class);
final InlineKeypair keyPair = new InlineKeypair("public", privKeyDataConfig, keyEncryptor);
<<<<<<<
final KeyDataConfig privKeyDataConfig =
new KeyDataConfig(
new PrivateKeyData(
"Wl+xSyXVuuqzpvznOS7dOobhcn4C5auxkFRi7yLtgtA=",
"yb7M8aRJzgxoJM2NecAPcmSVWDW1tRjv",
"MIqkFlgR2BWEpx2U0rObGg==",
"Gtvp1t6XZEiFVyaE/LHiP1+yvOIBBoiOL+bKeqcKgpiNt4j1oDDoqCC47UJpmQRC",
new ArgonOptions("i", 10, 1048576, 4)),
PrivateKeyType.LOCKED);
final InlineKeypair keyPair = new InlineKeypair("public", privKeyDataConfig);
=======
PrivateKeyData privateKeyData = mock(PrivateKeyData.class);
final KeyDataConfig privKeyDataConfig = mock(KeyDataConfig.class);
when(privKeyDataConfig.getPrivateKeyData()).thenReturn(privateKeyData);
when(privKeyDataConfig.getType()).thenReturn(PrivateKeyType.LOCKED);
final InlineKeypair keyPair = mock(InlineKeypair.class);
when(keyPair.getPrivateKeyConfig()).thenReturn(privKeyDataConfig);
>>>>>>>
PrivateKeyData privateKeyData = mock(PrivateKeyData.class);
final KeyDataConfig privKeyDataConfig = mock(KeyDataConfig.class);
when(privKeyDataConfig.getPrivateKeyData()).thenReturn(privateKeyData);
when(privKeyDataConfig.getType()).thenReturn(
PrivateKeyType.LOCKED);
final InlineKeypair keyPair = mock(InlineKeypair.class);
when(keyPair.getPrivateKeyConfig()).thenReturn(privKeyDataConfig);
<<<<<<<
=======
@Test
public void lockedKeyWithEncrptionErrorP() {
when(passwordReader.readPasswordFromConsole()).thenReturn("a");
final KeyDataConfig privKeyDataConfig = mock(KeyDataConfig.class);
when(privKeyDataConfig.getType()).thenReturn(PrivateKeyType.LOCKED);
PrivateKeyData privateKeyData = mock(PrivateKeyData.class);
when(privKeyDataConfig.getPrivateKeyData()).thenReturn(privateKeyData);
final InlineKeypair keyPair = mock(InlineKeypair.class);
when(keyPair.getPrivateKeyConfig()).thenReturn(privKeyDataConfig);
when(keyPair.getPrivateKey()).thenReturn("NACL_FAILURE");
this.cliKeyPasswordResolver.getSingleKeyPassword(0, keyPair);
assertThat(systemOutRule.getLog())
.containsOnlyOnce(
"Password for key[0] missing or invalid.\nAttempt 1 of 2. Enter a password for the key");
}
>>>>>>>
@Test
public void lockedKeyWithEncrptionErrorP() {
when(passwordReader.readPasswordFromConsole()).thenReturn("a");
final KeyDataConfig privKeyDataConfig = mock(KeyDataConfig.class);
when(privKeyDataConfig.getType()).thenReturn(PrivateKeyType.LOCKED);
PrivateKeyData privateKeyData = mock(PrivateKeyData.class);
when(privKeyDataConfig.getPrivateKeyData()).thenReturn(privateKeyData);
final InlineKeypair keyPair = mock(InlineKeypair.class);
when(keyPair.getPrivateKeyConfig()).thenReturn(privKeyDataConfig);
when(keyPair.getPrivateKey()).thenReturn("NACL_FAILURE");
this.cliKeyPasswordResolver.getSingleKeyPassword(0, keyPair);
assertThat(systemOutRule.getLog())
.containsOnlyOnce(
"Password for key[0] missing or invalid.\nAttempt 1 of 2. Enter a password for the key");
} |
<<<<<<<
=======
import java.io.IOException;
import java.util.List;
>>>>>>>
import java.io.IOException;
import java.util.List; |
<<<<<<<
if(!messageIsForChange()) {
commitMessageNotForChange();
=======
if (!messageIsForChange()) {
insertMessage(db);
if (changeMessage != null) {
ChangeUtil.bumpRowVersionNotLastUpdatedOn(
changeMessage.getKey().getParentKey(), db);
}
>>>>>>>
if(!messageIsForChange()) {
commitMessageNotForChange();
if (changeMessage != null) {
ChangeUtil.bumpRowVersionNotLastUpdatedOn(
changeMessage.getKey().getParentKey(), db);
} |
<<<<<<<
validateParentUpdate(ctl, input.parent, checkIfAdmin);
=======
String parentName = MoreObjects.firstNonNull(
Strings.emptyToNull(input.parent), allProjects.get());
validateParentUpdate(ctl, parentName, true);
IdentifiedUser user = (IdentifiedUser) ctl.getCurrentUser();
>>>>>>>
String parentName = MoreObjects.firstNonNull(
Strings.emptyToNull(input.parent), allProjects.get());
validateParentUpdate(ctl, parentName, checkIfAdmin); |
<<<<<<<
=======
if (topic.canEdit()) {
keysAction.add(new KeyCommand(0, 't', Util.C.keyEditTopic()) {
@Override
public void onKeyPress(KeyPressEvent event) {
// In Firefox this event is mistakenly called when F5 is pressed so
// differentiate F5 from 't' by checking the charCode(F5=0, t=116).
if (event.getNativeEvent().getCharCode() == 0) {
Window.Location.reload();
return;
}
topic.onEdit();
}
});
}
>>>>>>> |
<<<<<<<
import com.google.gerrit.testutil.FakeEmailSender;
=======
import com.google.gerrit.server.util.SystemLog;
>>>>>>>
import com.google.gerrit.server.util.SystemLog;
import com.google.gerrit.testutil.FakeEmailSender; |
<<<<<<<
import java.nio.file.Files;
import java.nio.file.Path;
=======
import java.util.Collection;
>>>>>>>
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection; |
<<<<<<<
createCommit(
testRepo,
=======
testPushWithMultipleChangeIds();
}
@Test
public void pushWithMultipleChangeIdsWithCreateNewChangeForAllNotInTarget()
throws Exception {
enableCreateNewChangeForAllNotInTarget();
testPushWithMultipleChangeIds();
}
private void testPushWithMultipleChangeIds() throws Exception {
createCommit(testRepo,
>>>>>>>
testPushWithMultipleChangeIds();
}
@Test
public void pushWithMultipleChangeIdsWithCreateNewChangeForAllNotInTarget() throws Exception {
enableCreateNewChangeForAllNotInTarget();
testPushWithMultipleChangeIds();
}
private void testPushWithMultipleChangeIds() throws Exception {
createCommit(
testRepo,
<<<<<<<
createCommit(testRepo, "Message with invalid Change-Id\n" + "\n" + "Change-Id: X\n");
pushForReviewRejected(testRepo, "invalid Change-Id line format in commit message footer");
=======
testpushWithInvalidChangeId();
}
@Test
public void pushWithInvalidChangeIdWithCreateNewChangeForAllNotInTarget()
throws Exception {
enableCreateNewChangeForAllNotInTarget();
testpushWithInvalidChangeId();
}
private void testpushWithInvalidChangeId() throws Exception {
createCommit(testRepo, "Message with invalid Change-Id\n"
+ "\n"
+ "Change-Id: X\n");
pushForReviewRejected(testRepo,
"invalid Change-Id line format in commit message footer");
>>>>>>>
testpushWithInvalidChangeId();
}
@Test
public void pushWithInvalidChangeIdWithCreateNewChangeForAllNotInTarget() throws Exception {
enableCreateNewChangeForAllNotInTarget();
testpushWithInvalidChangeId();
}
private void testpushWithInvalidChangeId() throws Exception {
createCommit(testRepo, "Message with invalid Change-Id\n" + "\n" + "Change-Id: X\n");
pushForReviewRejected(testRepo, "invalid Change-Id line format in commit message footer");
<<<<<<<
createCommit(
testRepo,
"Message with invalid Change-Id\n"
+ "\n"
+ "Change-Id: I0000000000000000000000000000000000000000\n");
pushForReviewRejected(testRepo, "invalid Change-Id line format in commit message footer");
=======
testPushWithInvalidChangeIdFromEgit();
}
@Test
public void pushWithInvalidChangeIdFromEgitWithCreateNewChangeForAllNotInTarget()
throws Exception {
enableCreateNewChangeForAllNotInTarget();
testPushWithInvalidChangeIdFromEgit();
}
private void testPushWithInvalidChangeIdFromEgit() throws Exception {
createCommit(testRepo, "Message with invalid Change-Id\n"
+ "\n"
+ "Change-Id: I0000000000000000000000000000000000000000\n");
pushForReviewRejected(testRepo,
"invalid Change-Id line format in commit message footer");
>>>>>>>
testPushWithInvalidChangeIdFromEgit();
}
@Test
public void pushWithInvalidChangeIdFromEgitWithCreateNewChangeForAllNotInTarget()
throws Exception {
enableCreateNewChangeForAllNotInTarget();
testPushWithInvalidChangeIdFromEgit();
}
private void testPushWithInvalidChangeIdFromEgit() throws Exception {
createCommit(
testRepo,
"Message with invalid Change-Id\n"
+ "\n"
+ "Change-Id: I0000000000000000000000000000000000000000\n");
pushForReviewRejected(testRepo, "invalid Change-Id line format in commit message footer"); |
<<<<<<<
public byte[] createNewRecipientBox(EncodedPayloadWithRecipients payload, PublicKey publicKey) {
if (payload.getRecipientKeys().isEmpty() || payload.getEncodedPayload().getRecipientBoxes().isEmpty()) {
throw new RuntimeException("No key or recipient-box to use");
}
final MasterKey master = this.getMasterKey(
payload.getRecipientKeys().get(0),
payload.getEncodedPayload().getSenderKey(),
payload.getEncodedPayload().getRecipientNonce(),
payload.getEncodedPayload().getRecipientBoxes().get(0)
);
final List<byte[]> sealedMasterKeyList = this.buildRecipientMasterKeys(
payload.getEncodedPayload().getSenderKey(),
singletonList(publicKey),
payload.getEncodedPayload().getRecipientNonce(),
master
);
return sealedMasterKeyList.get(0);
}
@Override
public EncodedPayloadWithRecipients encryptPayload(final RawTransaction rawTransaction,
=======
public EncodedPayload encryptPayload(final RawTransaction rawTransaction,
>>>>>>>
public byte[] createNewRecipientBox(EncodedPayloadWithRecipients payload, PublicKey publicKey) {
if (payload.getRecipientKeys().isEmpty() || payload.getEncodedPayload().getRecipientBoxes().isEmpty()) {
throw new RuntimeException("No key or recipient-box to use");
}
final MasterKey master = this.getMasterKey(
payload.getRecipientKeys().get(0),
payload.getEncodedPayload().getSenderKey(),
payload.getEncodedPayload().getRecipientNonce(),
payload.getEncodedPayload().getRecipientBoxes().get(0)
);
final List<byte[]> sealedMasterKeyList = this.buildRecipientMasterKeys(
payload.getEncodedPayload().getSenderKey(),
singletonList(publicKey),
payload.getEncodedPayload().getRecipientNonce(),
master
);
return sealedMasterKeyList.get(0);
}
@Override
public EncodedPayload encryptPayload(final RawTransaction rawTransaction, |
<<<<<<<
import static java.util.Collections.*;
import static org.assertj.core.api.Assertions.*;
import org.mockito.ArgumentCaptor;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.Optional;
import static java.util.Collections.*;
=======
import static java.util.Collections.singletonList;
>>>>>>>
import static java.util.Collections.*;
import static org.assertj.core.api.Assertions.*;
import org.mockito.ArgumentCaptor;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.Optional;
import static java.util.Collections.*;
import static java.util.Collections.singletonList;
<<<<<<<
when(enclave.encryptPayload(any(), any(), any())).thenReturn(encodedPayloadWithRecipients);
when(payloadEncoder.forRecipient(any(EncodedPayloadWithRecipients.class), any(PublicKey.class)))
.thenReturn(encodedPayloadWithRecipients);
=======
when(enclave.encryptPayload(any(), any(), any())).thenReturn(encodedPayload);
>>>>>>>
when(enclave.encryptPayload(any(), any(), any())).thenReturn(encodedPayloadWithRecipients);
when(payloadEncoder.forRecipient(any(EncodedPayloadWithRecipients.class), any(PublicKey.class)))
.thenReturn(encodedPayloadWithRecipients);
<<<<<<<
PublicKey recipientKey = PublicKey.from("PUBLICKEY".getBytes());
EncodedPayloadWithRecipients payloadWithRecipients = new EncodedPayloadWithRecipients(encodedPayload, null);
when(payloadEncoder.decodePayloadWithRecipients(encodedPayloadData)).thenReturn(payloadWithRecipients);
when(payloadEncoder.forRecipient(payloadWithRecipients, recipientKey)).thenReturn(payloadWithRecipients);
when(payloadEncoder.encode(any(EncodedPayloadWithRecipients.class))).thenReturn(encodedOutcome);
final String messageHashb64 = Base64.getEncoder().encodeToString("KEY".getBytes());
=======
String publicKeyData = Base64.getEncoder().encodeToString("PUBLICKEY".getBytes());
PublicKey recipientKey = PublicKey.from(publicKeyData.getBytes());
when(payloadEncoder.decode(encodedPayloadData)).thenReturn(encodedPayload);
when(payloadEncoder.forRecipient(encodedPayload, recipientKey)).thenReturn(encodedPayload);
when(payloadEncoder.encode(any(EncodedPayload.class))).thenReturn(encodedOutcome);
byte[] keyData = Base64.getEncoder().encode("KEY".getBytes());
>>>>>>>
PublicKey recipientKey = PublicKey.from("PUBLICKEY".getBytes());
EncodedPayloadWithRecipients payloadWithRecipients = new EncodedPayloadWithRecipients(encodedPayload, null);
when(payloadEncoder.decodePayloadWithRecipients(encodedPayloadData)).thenReturn(payloadWithRecipients);
when(payloadEncoder.forRecipient(payloadWithRecipients, recipientKey)).thenReturn(payloadWithRecipients);
when(payloadEncoder.encode(any(EncodedPayloadWithRecipients.class))).thenReturn(encodedOutcome);
final String messageHashb64 = Base64.getEncoder().encodeToString("KEY".getBytes());
<<<<<<<
verify(payloadEncoder).decodePayloadWithRecipients(encodedPayloadData);
verify(payloadEncoder).forRecipient(payloadWithRecipients, recipientKey);
verify(payloadEncoder).encode(payloadWithRecipients);
=======
verify(payloadEncoder).decode(encodedPayloadData);
verify(payloadEncoder).forRecipient(encodedPayload, recipientKey);
verify(payloadEncoder).encode(any(EncodedPayload.class));
>>>>>>>
verify(payloadEncoder).decodePayloadWithRecipients(encodedPayloadData);
verify(payloadEncoder).forRecipient(payloadWithRecipients, recipientKey);
verify(payloadEncoder).encode(payloadWithRecipients); |
<<<<<<<
private static final Pattern NEW_PATCHSET = Pattern
.compile("^refs/changes/(?:[0-9][0-9])?(/[1-9][0-9]*){1,2}(?:/new)?$");
private static final String GIT_HOOKS_COMMIT_MSG =
"`git rev-parse --git-dir`/hooks/commit-msg";
=======
>>>>>>>
private static final String GIT_HOOKS_COMMIT_MSG =
"`git rev-parse --git-dir`/hooks/commit-msg";
<<<<<<<
|| NEW_PATCHSET.matcher(receiveEvent.command.getRefName()).matches()) {
validators.add(new ChangeIdValidator(refControl, canonicalWebUrl,
installCommitMsgHookCommand, sshInfo));
=======
|| ReceiveCommits.NEW_PATCHSET.matcher(
receiveEvent.command.getRefName()).matches()) {
validators.add(new ChangeIdValidator(refControl, canonicalWebUrl, sshInfo));
>>>>>>>
|| ReceiveCommits.NEW_PATCHSET.matcher(
receiveEvent.command.getRefName()).matches()) {
validators.add(new ChangeIdValidator(refControl, canonicalWebUrl,
installCommitMsgHookCommand, sshInfo));
<<<<<<<
|| NEW_PATCHSET.matcher(receiveEvent.command.getRefName()).matches()) {
validators.add(new ChangeIdValidator(refControl, canonicalWebUrl,
installCommitMsgHookCommand, sshInfo));
=======
|| ReceiveCommits.NEW_PATCHSET.matcher(
receiveEvent.command.getRefName()).matches()) {
validators.add(new ChangeIdValidator(refControl, canonicalWebUrl, sshInfo));
>>>>>>>
|| ReceiveCommits.NEW_PATCHSET.matcher(
receiveEvent.command.getRefName()).matches()) {
validators.add(new ChangeIdValidator(refControl, canonicalWebUrl,
installCommitMsgHookCommand, sshInfo)); |
<<<<<<<
saveProjectConfig(project, cfg);
=======
Util.allow(cfg, Permission.forLabel(P.getName()), 0, 1, anonymousUsers,
"refs/heads/*");
saveProjectConfig(cfg);
>>>>>>>
Util.allow(cfg, Permission.forLabel(P.getName()), 0, 1, anonymousUsers,
"refs/heads/*");
saveProjectConfig(project, cfg);
<<<<<<<
ProjectConfig cfg = projectCache.checkedGet(project).getConfig();
cfg.getLabelSections().put(label.getName(), label);
saveProjectConfig(project, cfg);
=======
ProjectConfig cfg = projectCache.checkedGet(allProjects).getConfig();
cfg.getLabelSections().put(Q.getName(), Q);
cfg.getLabelSections().put(P.getName(), P);
saveProjectConfig(cfg);
}
private void saveProjectConfig(ProjectConfig cfg) throws Exception {
MetaDataUpdate md = metaDataUpdateFactory.create(allProjects);
try {
cfg.commit(md);
} finally {
md.close();
}
projectCache.evict(allProjects);
>>>>>>>
ProjectConfig cfg = projectCache.checkedGet(project).getConfig();
cfg.getLabelSections().put(label.getName(), label);
cfg.getLabelSections().put(P.getName(), P);
saveProjectConfig(project, cfg); |
<<<<<<<
=======
import org.apache.commons.dbcp.BasicDataSource;
import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
>>>>>>>
<<<<<<<
import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import javax.sql.DataSource;
>>>>>>>
import javax.sql.DataSource;
import org.apache.commons.dbcp.BasicDataSource;
import org.eclipse.jgit.lib.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
H2AccountPatchReviewStore(@GerritServerConfig Config cfg, SitePaths sitePaths) {
this.url = H2.appendUrlOptions(cfg, getUrl(sitePaths));
=======
H2AccountPatchReviewStore(@GerritServerConfig Config cfg,
SitePaths sitePaths) {
this.ds = createDataSource(H2.appendUrlOptions(cfg, getUrl(sitePaths)));
>>>>>>>
H2AccountPatchReviewStore(@GerritServerConfig Config cfg, SitePaths sitePaths) {
this.ds = createDataSource(H2.appendUrlOptions(cfg, getUrl(sitePaths)));
<<<<<<<
stmt.executeUpdate(
"CREATE TABLE IF NOT EXISTS ACCOUNT_PATCH_REVIEWS ("
+ "ACCOUNT_ID INTEGER DEFAULT 0 NOT NULL, "
+ "CHANGE_ID INTEGER DEFAULT 0 NOT NULL, "
+ "PATCH_SET_ID INTEGER DEFAULT 0 NOT NULL, "
+ "FILE_NAME VARCHAR(255) DEFAULT '' NOT NULL, "
+ "CONSTRAINT PRIMARY_KEY_ACCOUNT_PATCH_REVIEWS "
+ "PRIMARY KEY (ACCOUNT_ID, CHANGE_ID, PATCH_SET_ID, FILE_NAME)"
+ ")");
=======
doCreateTable(stmt);
} catch (SQLException e) {
throw convertError("create", e);
}
}
private void createTableIfNotExists() throws OrmException {
try (Connection con = ds.getConnection();
Statement stmt = con.createStatement()) {
doCreateTable(stmt);
>>>>>>>
doCreateTable(stmt);
} catch (SQLException e) {
throw convertError("create", e);
}
}
private void createTableIfNotExists() throws OrmException {
try (Connection con = ds.getConnection();
Statement stmt = con.createStatement()) {
doCreateTable(stmt);
<<<<<<<
public boolean markReviewed(PatchSet.Id psId, Account.Id accountId, String path)
throws OrmException {
try (Connection con = DriverManager.getConnection(url);
=======
public boolean markReviewed(PatchSet.Id psId, Account.Id accountId,
String path) throws OrmException {
try (Connection con = ds.getConnection();
>>>>>>>
public boolean markReviewed(PatchSet.Id psId, Account.Id accountId, String path)
throws OrmException {
try (Connection con = ds.getConnection(); |
<<<<<<<
con.prepareStatement(
"INSERT INTO ACCOUNT_PATCH_REVIEWS "
+ "(ACCOUNT_ID, CHANGE_ID, PATCH_SET_ID, FILE_NAME) VALUES "
+ "(?, ?, ?, ?)")) {
=======
con.prepareStatement("INSERT INTO account_patch_reviews "
+ "(account_id, change_id, patch_set_id, file_name) VALUES "
+ "(?, ?, ?, ?)")) {
>>>>>>>
con.prepareStatement(
"INSERT INTO account_patch_reviews "
+ "(account_id, change_id, patch_set_id, file_name) VALUES "
+ "(?, ?, ?, ?)")) {
<<<<<<<
con.prepareStatement(
"INSERT INTO ACCOUNT_PATCH_REVIEWS "
+ "(ACCOUNT_ID, CHANGE_ID, PATCH_SET_ID, FILE_NAME) VALUES "
+ "(?, ?, ?, ?)")) {
=======
con.prepareStatement("INSERT INTO account_patch_reviews "
+ "(account_id, change_id, patch_set_id, file_name) VALUES "
+ "(?, ?, ?, ?)")) {
>>>>>>>
con.prepareStatement(
"INSERT INTO account_patch_reviews "
+ "(account_id, change_id, patch_set_id, file_name) VALUES "
+ "(?, ?, ?, ?)")) {
<<<<<<<
con.prepareStatement(
"DELETE FROM ACCOUNT_PATCH_REVIEWS "
+ "WHERE ACCOUNT_ID = ? AND CHANGE_ID + ? AND "
+ "PATCH_SET_ID = ? AND FILE_NAME = ?")) {
=======
con.prepareStatement("DELETE FROM account_patch_reviews "
+ "WHERE account_id = ? AND change_id = ? AND "
+ "patch_set_id = ? AND file_name = ?")) {
>>>>>>>
con.prepareStatement(
"DELETE FROM account_patch_reviews "
+ "WHERE account_id = ? AND change_id = ? AND "
+ "patch_set_id = ? AND file_name = ?")) {
<<<<<<<
con.prepareStatement(
"DELETE FROM ACCOUNT_PATCH_REVIEWS "
+ "WHERE CHANGE_ID + ? AND PATCH_SET_ID = ?")) {
=======
con.prepareStatement("DELETE FROM account_patch_reviews "
+ "WHERE change_id = ? AND patch_set_id = ?")) {
>>>>>>>
con.prepareStatement(
"DELETE FROM account_patch_reviews "
+ "WHERE change_id = ? AND patch_set_id = ?")) {
<<<<<<<
con.prepareStatement(
"SELECT FILE_NAME FROM ACCOUNT_PATCH_REVIEWS "
+ "WHERE ACCOUNT_ID = ? AND CHANGE_ID = ? AND PATCH_SET_ID = ?")) {
=======
con.prepareStatement("SELECT FILE_NAME FROM account_patch_reviews "
+ "WHERE account_id = ? AND change_id = ? AND patch_set_id = ?")) {
>>>>>>>
con.prepareStatement(
"SELECT FILE_NAME FROM account_patch_reviews "
+ "WHERE account_id = ? AND change_id = ? AND patch_set_id = ?")) { |
<<<<<<<
=======
import static com.google.gerrit.acceptance.GitUtil.cloneProject;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
>>>>>>>
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
<<<<<<<
=======
import org.eclipse.jgit.transport.PushResult;
import org.joda.time.DateTime;
import org.joda.time.DateTimeUtils;
import org.joda.time.DateTimeUtils.MillisProvider;
import org.junit.AfterClass;
>>>>>>>
import org.joda.time.DateTime;
import org.joda.time.DateTimeUtils;
import org.joda.time.DateTimeUtils.MillisProvider;
import org.junit.AfterClass;
<<<<<<<
assertThat(cr.all.get(0).value).isEqualTo(1);
=======
assertThat(cr.all.get(0).value.intValue()).is(1);
assertThat(Iterables.getLast(ci.messages).message).isEqualTo(
"Uploaded patch set 1: Code-Review+1.");
>>>>>>>
assertThat(cr.all.get(0).value).isEqualTo(1);
assertThat(Iterables.getLast(ci.messages).message).isEqualTo(
"Uploaded patch set 1: Code-Review+1.");
<<<<<<<
assertThat(cr.all.get(0).value).isEqualTo(2);
=======
assertThat(cr.all.get(0).value.intValue()).is(2);
push =
pushFactory.create(db, admin.getIdent(), PushOneCommit.SUBJECT,
"c.txt", "moreContent", r.getChangeId());
r = push.to(git, "refs/for/master/%l=Code-Review+2");
ci = get(r.getChangeId());
assertThat(Iterables.getLast(ci.messages).message).isEqualTo(
"Uploaded patch set 3.");
>>>>>>>
assertThat(cr.all.get(0).value).isEqualTo(2);
push =
pushFactory.create(db, admin.getIdent(), testRepo, PushOneCommit.SUBJECT,
"c.txt", "moreContent", r.getChangeId());
r = push.to("refs/for/master/%l=Code-Review+2");
ci = get(r.getChangeId());
assertThat(Iterables.getLast(ci.messages).message).isEqualTo(
"Uploaded patch set 3."); |
<<<<<<<
Map<String, Ref> allRefs = repo.getRefDatabase().getRefs(ALL);
for (Entry<String, Ref> entry : allRefs.entrySet()) {
=======
for (Entry<String, Ref> entry : repo.getAllRefs().entrySet()) {
String refName = entry.getKey();
if (!refName.startsWith("refs/heads") && !refName.startsWith("refs/tags")) {
continue;
}
>>>>>>>
Map<String, Ref> allRefs = repo.getRefDatabase().getRefs(ALL);
for (Entry<String, Ref> entry : allRefs.entrySet()) {
String refName = entry.getKey();
if (!refName.startsWith("refs/heads") && !refName.startsWith("refs/tags")) {
continue;
} |
<<<<<<<
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
=======
import com.google.common.collect.HashMultimap;
>>>>>>>
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.HashMultimap;
<<<<<<<
=======
import com.google.common.collect.Maps;
import com.google.common.collect.SetMultimap;
>>>>>>>
import com.google.common.collect.SetMultimap;
<<<<<<<
private ListMultimap<Change.Id, Ref> refsByChange;
private Map<ObjectId, Ref> refsById;
=======
private SetMultimap<ObjectId, Ref> refsById;
>>>>>>>
private ListMultimap<Change.Id, Ref> refsByChange;
private SetMultimap<ObjectId, Ref> refsById; |
<<<<<<<
descBlock.display(r.getChange(), null, false, r.getPatchSetInfo(), r.getAccounts(),
r.getSubmitTypeRecord(), commentLinkProcessor);
=======
descBlock.display(changeDetail, null, false, r.getPatchSetInfo(), r.getAccounts(),
r.getSubmitTypeRecord());
>>>>>>>
descBlock.display(changeDetail, null, false, r.getPatchSetInfo(), r.getAccounts(),
r.getSubmitTypeRecord(), commentLinkProcessor); |
<<<<<<<
List<SubmitStrategyOp> ops = new ArrayList<>(sorted.size());
boolean first = true;
=======
for (CodeReviewCommit c : sorted) {
if (c.getParentCount() > 1) {
// Since there is a merge commit, sort and prune again using
// MERGE_IF_NECESSARY semantics to avoid creating duplicate
// commits.
//
sorted = args.mergeUtil.reduceToMinimalMerge(args.mergeSorter, sorted);
break;
}
}
>>>>>>>
List<SubmitStrategyOp> ops = new ArrayList<>(sorted.size());
boolean first = true;
for (CodeReviewCommit c : sorted) {
if (c.getParentCount() > 1) {
// Since there is a merge commit, sort and prune again using
// MERGE_IF_NECESSARY semantics to avoid creating duplicate
// commits.
//
sorted = args.mergeUtil.reduceToMinimalMerge(args.mergeSorter, sorted);
break;
}
} |
<<<<<<<
import com.google.gerrit.common.data.GroupDetail;
=======
import com.google.gerrit.common.Nullable;
>>>>>>>
<<<<<<<
import com.google.gerrit.server.account.VersionedAccountDestinations;
import com.google.gerrit.server.account.VersionedAccountQueries;
import com.google.gerrit.server.account.GroupCache;
import com.google.gerrit.server.account.GroupDetailFactory;
=======
>>>>>>>
import com.google.gerrit.server.account.VersionedAccountDestinations;
import com.google.gerrit.server.account.VersionedAccountQueries;
<<<<<<<
final boolean allowsDrafts;
final ChangeIndex index;
=======
>>>>>>>
final ChangeIndex index;
<<<<<<<
allProjectsName, allUsersName, patchListCache, repoManager,
projectCache, groupCache, listChildProjects, submitStrategyFactory,
conflictsCache, trackingFooters,
indexes != null ? indexes.getSearchIndex() : null,
indexConfig,
=======
allProjectsName, patchListCache, repoManager, projectCache,
listChildProjects, indexes, submitStrategyFactory,
conflictsCache, trackingFooters, indexConfig, listMembers,
>>>>>>>
allProjectsName, allUsersName, patchListCache, repoManager,
projectCache, listChildProjects, submitStrategyFactory,
conflictsCache, trackingFooters,
indexes != null ? indexes.getSearchIndex() : null,
indexConfig, listMembers,
<<<<<<<
this.allowsDrafts = allowsDrafts;
this.index = index;
=======
>>>>>>>
this.index = index;
<<<<<<<
allProjectsName, allUsersName, patchListCache, repoManager,
projectCache, groupCache, listChildProjects, submitStrategyFactory,
conflictsCache, trackingFooters, index, indexConfig, allowsDrafts);
=======
allProjectsName, patchListCache, repoManager, projectCache,
listChildProjects, indexes, submitStrategyFactory, conflictsCache,
trackingFooters, indexConfig, listMembers, allowsDrafts);
>>>>>>>
allProjectsName, allUsersName, patchListCache, repoManager,
projectCache, listChildProjects, submitStrategyFactory,
conflictsCache, trackingFooters, index, indexConfig, listMembers,
allowsDrafts); |
<<<<<<<
type.setLinkName(cfg.getString("gitweb", null, "linkname"));
=======
if (type == null) {
url = null;
gitweb_cgi = null;
gitweb_css = null;
gitweb_js = null;
git_logo_png = null;
return;
}
>>>>>>>
if (type == null) {
url = null;
gitweb_cgi = null;
gitweb_css = null;
gitweb_js = null;
git_logo_png = null;
return;
}
type.setLinkName(cfg.getString("gitweb", null, "linkname")); |
<<<<<<<
import com.google.gerrit.server.account.AccountCache;
import com.google.gerrit.server.account.AccountJson;
=======
>>>>>>>
import com.google.gerrit.server.account.AccountJson; |
<<<<<<<
=======
import com.google.gerrit.server.util.MagicBranch;
import com.google.gwtorm.server.AtomicUpdate;
>>>>>>>
import com.google.gerrit.server.util.MagicBranch;
<<<<<<<
final String msg =
"Patch Set " + newPatchSet.getPatchSetId()
+ ": Commit message was updated";
change = patchSetInserterFactory
.create(git, revWalk, refControl, change, newCommit)
.setPatchSet(newPatchSet)
.setMessage(msg)
.setCopyLabels(true)
.setValidateForReceiveCommits(true)
.insert();
=======
final PatchSetInfo info =
patchSetInfoFactory.get(newCommit, newPatchSet.getId());
final String refName = newPatchSet.getRefName();
CommitReceivedEvent commitReceivedEvent =
new CommitReceivedEvent(new ReceiveCommand(ObjectId.zeroId(),
newCommit.getId(), refName.substring(0,
refName.lastIndexOf("/") + 1) + "new"), refControl
.getProjectControl().getProject(), refControl.getRefName(),
newCommit, user);
try {
commitValidators.validateForReceiveCommits(commitReceivedEvent);
} catch (CommitValidationException e) {
throw new InvalidChangeOperationException(e.getMessage());
}
final RefUpdate ru = git.updateRef(newPatchSet.getRefName());
ru.setExpectedOldObjectId(ObjectId.zeroId());
ru.setNewObjectId(newCommit);
ru.disableRefLog();
if (ru.update(revWalk) != RefUpdate.Result.NEW) {
throw new IOException(String.format(
"Failed to create ref %s in %s: %s", newPatchSet.getRefName(),
change.getDest().getParentKey().get(), ru.getResult()));
}
gitRefUpdated.fire(change.getProject(), ru);
db.changes().beginTransaction(change.getId());
try {
Change updatedChange = db.changes().get(change.getId());
if (updatedChange != null && updatedChange.getStatus().isOpen()) {
change = updatedChange;
} else {
throw new InvalidChangeOperationException(String.format(
"Change %s is closed", change.getId()));
}
ChangeUtil.insertAncestors(db, newPatchSet.getId(), commit);
db.patchSets().insert(Collections.singleton(newPatchSet));
updatedChange =
db.changes().atomicUpdate(change.getId(), new AtomicUpdate<Change>() {
@Override
public Change update(Change change) {
if (change.getStatus().isClosed()) {
return null;
}
if (!change.currentPatchSetId().equals(patchSetId)) {
return null;
}
if (change.getStatus() != Change.Status.DRAFT) {
change.setStatus(Change.Status.NEW);
}
change.setLastSha1MergeTested(null);
change.setCurrentPatchSet(info);
ChangeUtil.updated(change);
return change;
}
});
if (updatedChange != null) {
change = updatedChange;
} else {
throw new InvalidChangeOperationException(String.format(
"Change %s was modified", change.getId()));
}
ApprovalsUtil.copyLabels(db,
refControl.getProjectControl().getLabelTypes(),
originalPS.getId(),
change.currentPatchSetId());
final List<FooterLine> footerLines = newCommit.getFooterLines();
updateTrackingIds(db, change, trackingFooters, footerLines);
final ChangeMessage cmsg =
new ChangeMessage(new ChangeMessage.Key(changeId,
ChangeUtil.messageUUID(db)), user.getAccountId(), patchSetId);
final String msg = "Patch Set " + newPatchSet.getPatchSetId() + ": Commit message was updated";
cmsg.setMessage(msg);
db.changeMessages().insert(Collections.singleton(cmsg));
db.commit();
final CommitMessageEditedSender cm = commitMessageEditedSenderFactory.create(change);
cm.setFrom(user.getAccountId());
cm.setChangeMessage(cmsg);
cm.send();
} finally {
db.rollback();
}
hooks.doPatchsetCreatedHook(change, newPatchSet, db);
>>>>>>>
final String msg =
"Patch Set " + newPatchSet.getPatchSetId()
+ ": Commit message was updated";
change = patchSetInserterFactory
.create(git, revWalk, refControl, change, newCommit)
.setPatchSet(newPatchSet)
.setMessage(msg)
.setCopyLabels(true)
.setValidateForReceiveCommits(true)
.insert(); |
<<<<<<<
"WARN: InitStep from plugin %s does not implement %s (Exception: %s)",
jar.getFileName(), InitStep.class.getName(), e.getMessage());
=======
"WARN: InitStep from plugin %s does not implement %s (Exception: %s)\n",
jar.getName(), InitStep.class.getName(), e.getMessage());
>>>>>>>
"WARN: InitStep from plugin %s does not implement %s (Exception: %s)\n",
jar.getFileName(), InitStep.class.getName(), e.getMessage()); |
<<<<<<<
=======
import com.google.gerrit.common.ChangeHooks;
import com.google.gerrit.common.Die;
import com.google.gerrit.common.DisabledChangeHooks;
import com.google.gerrit.extensions.events.GitReferenceUpdatedListener;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.extensions.registration.DynamicMap;
import com.google.gerrit.extensions.registration.DynamicSet;
>>>>>>>
import com.google.gerrit.common.Die;
<<<<<<<
threads = ThreadLimiter.limitThreads(dbInjector, threads);
=======
cfg = dbInjector.getInstance(
Key.get(Config.class, GerritServerConfig.class));
checkNotSlaveMode();
limitThreads();
>>>>>>>
threads = ThreadLimiter.limitThreads(dbInjector, threads);
cfg = dbInjector.getInstance(
Key.get(Config.class, GerritServerConfig.class));
checkNotSlaveMode();
<<<<<<<
=======
private void checkNotSlaveMode() throws Die {
if (cfg.getBoolean("container", "slave", false)) {
throw die("Cannot run reindex in slave mode");
}
}
private void limitThreads() {
boolean usePool = cfg.getBoolean("database", "connectionpool",
dbInjector.getInstance(DataSourceType.class).usePool());
int poolLimit = cfg.getInt("database", "poollimit",
DataSourceProvider.DEFAULT_POOL_LIMIT);
if (usePool && threads > poolLimit) {
log.warn("Limiting reindexing to " + poolLimit
+ " threads due to database.poolLimit");
threads = poolLimit;
}
}
>>>>>>>
private void checkNotSlaveMode() throws Die {
if (cfg.getBoolean("container", "slave", false)) {
throw die("Cannot run reindex in slave mode");
}
} |
<<<<<<<
import com.google.common.base.Strings;
=======
import com.google.gerrit.common.ChangeHooks;
>>>>>>>
import com.google.common.base.Strings;
import com.google.gerrit.common.ChangeHooks; |
<<<<<<<
@Source("listAdd.png")
public ImageResource listAdd();
=======
@Source("important.png")
public ImageResource important();
>>>>>>>
@Source("listAdd.png")
public ImageResource listAdd();
@Source("important.png")
public ImageResource important(); |
<<<<<<<
PrivacyHelper privacyHelper,
=======
BatchPayloadPublisher batchPayloadPublisher,
>>>>>>>
PrivacyHelper privacyHelper,
BatchPayloadPublisher batchPayloadPublisher,
<<<<<<<
=======
payloadPublisher,
batchPayloadPublisher,
>>>>>>>
payloadPublisher,
batchPayloadPublisher,
<<<<<<<
=======
PayloadPublisher payloadPublisher,
BatchPayloadPublisher batchPayloadPublisher,
>>>>>>>
PayloadPublisher payloadPublisher,
BatchPayloadPublisher batchPayloadPublisher,
<<<<<<<
=======
this.payloadPublisher = Objects.requireNonNull(payloadPublisher, "payloadPublisher is required");
this.batchPayloadPublisher = Objects.requireNonNull(batchPayloadPublisher, "batchPayloadPublisher is required");
>>>>>>>
this.payloadPublisher = Objects.requireNonNull(payloadPublisher, "payloadPublisher is required");
this.batchPayloadPublisher = Objects.requireNonNull(batchPayloadPublisher, "batchPayloadPublisher is required");
<<<<<<<
boolean publish(List<PublicKey> recipientList, EncodedPayload payload) {
recipientList.stream()
.filter(k -> !enclave.getPublicKeys().contains(k))
.forEach(
recipient -> {
final EncodedPayload outgoing = payloadEncoder.forRecipient(payload, recipient);
payloadPublisher.publishPayload(outgoing, recipient);
});
return true;
=======
return SendResponse.from(transactionHash);
>>>>>>>
return SendResponse.from(transactionHash); |
<<<<<<<
throws IntegrationException {
=======
throws MergeException {
// Test for merge instead of cherry pick to avoid false negatives
// on commit chains.
>>>>>>>
throws IntegrationException {
// Test for merge instead of cherry pick to avoid false negatives
// on commit chains. |
<<<<<<<
stdout.print(String.format("%8s %-12s %-12s %-4s %s\n", //
id(task.getTaskId()), start, startTime, "", format(task)));
=======
stdout.print(String.format(
"%8s %-12s %-8s %s\n",
id(taskInfo.getTaskId()), start, "",
taskInfo.getTaskString(taskNameWidth)));
>>>>>>>
stdout.print(String.format("%8s %-12s %-12s %-4s %s\n", //
id(taskInfo.getTaskId()), start, startTime, "",
taskInfo.getTaskString(taskNameWidth)));
<<<<<<<
stdout.print(String.format("%8s %-12s %-4s %s\n", //
id(task.getTaskId()), start, startTime, remoteName));
=======
stdout.print(String.format("%8s %-12s %-8s %s\n",
id(taskInfo.getTaskId()), start, "", remoteName));
>>>>>>>
stdout.print(String.format("%8s %-12s %-4s %s\n", //
id(taskInfo.getTaskId()), start, startTime, remoteName)); |
<<<<<<<
private final CreateChange createChange;
=======
private final ChangeIndexer changeIndexer;
>>>>>>>
private final CreateChange createChange;
private final ChangeIndexer changeIndexer;
<<<<<<<
DynamicMap<RestView<ChangeResource>> views,
CreateChange createChange) {
=======
DynamicMap<RestView<ChangeResource>> views,
ChangeIndexer changeIndexer) {
>>>>>>>
DynamicMap<RestView<ChangeResource>> views,
CreateChange createChange,
ChangeIndexer changeIndexer) {
<<<<<<<
this.createChange = createChange;
=======
this.changeIndexer = changeIndexer;
>>>>>>>
this.createChange = createChange;
this.changeIndexer = changeIndexer; |
<<<<<<<
import com.google.gerrit.server.config.AllProjectsNameProvider;
=======
import com.google.gerrit.server.git.GitRepositoryManager;
>>>>>>>
import com.google.gerrit.server.config.AllProjectsNameProvider;
<<<<<<<
ChangeProjectAccess(ProjectAccessFactory.Factory projectAccessFactory,
ProjectControl.Factory projectControlFactory,
ProjectCache projectCache, GroupBackend groupBackend,
MetaDataUpdate.User metaDataUpdateFactory,
AllProjectsNameProvider allProjects,
Provider<SetParent> setParent,
=======
ChangeProjectAccess(final ProjectAccessFactory.Factory projectAccessFactory,
final ProjectControl.Factory projectControlFactory,
final ProjectCache projectCache, final GroupBackend groupBackend,
final MetaDataUpdate.User metaDataUpdateFactory,
ChangeHooks hooks, IdentifiedUser user,
>>>>>>>
ChangeProjectAccess(ProjectAccessFactory.Factory projectAccessFactory,
ProjectControl.Factory projectControlFactory,
ProjectCache projectCache, GroupBackend groupBackend,
MetaDataUpdate.User metaDataUpdateFactory,
AllProjectsNameProvider allProjects,
Provider<SetParent> setParent,
ChangeHooks hooks, IdentifiedUser user,
<<<<<<<
MetaDataUpdate md, boolean parentProjectUpdate) throws IOException,
NoSuchProjectException, ConfigInvalidException {
config.commit(md);
=======
MetaDataUpdate md) throws IOException, NoSuchProjectException, ConfigInvalidException {
RevCommit commit = config.commit(md);
hooks.doRefUpdatedHook(
new Branch.NameKey(config.getProject().getNameKey(), GitRepositoryManager.REF_CONFIG),
base, commit.getId(), user.getAccount());
>>>>>>>
MetaDataUpdate md, boolean parentProjectUpdate) throws IOException,
NoSuchProjectException, ConfigInvalidException {
RevCommit commit = config.commit(md);
hooks.doRefUpdatedHook(
new Branch.NameKey(config.getProject().getNameKey(), RefNames.REFS_CONFIG),
base, commit.getId(), user.getAccount()); |
<<<<<<<
import com.google.gerrit.server.mime.MimeUtil2Module;
import com.google.gerrit.server.patch.IntraLineWorkerPool;
=======
import com.google.gerrit.server.patch.DiffExecutorModule;
>>>>>>>
import com.google.gerrit.server.mime.MimeUtil2Module;
import com.google.gerrit.server.patch.DiffExecutorModule;
<<<<<<<
modules.add(new IntraLineWorkerPool.Module());
modules.add(new MimeUtil2Module());
=======
modules.add(new MergeabilityChecksExecutorModule());
modules.add(new DiffExecutorModule());
>>>>>>>
modules.add(new DiffExecutorModule());
modules.add(new MimeUtil2Module()); |
<<<<<<<
=======
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.PersonIdent;
>>>>>>>
<<<<<<<
return Guice.createInjector(
Stage.DEVELOPMENT,
new AbstractModule() {
@Override
protected void configure() {
bind(SchemaVersion.class).to(SchemaVersion.C);
for (Key<?> k :
new Key<?>[] {
Key.get(PersonIdent.class, GerritPersonIdent.class),
Key.get(String.class, AnonymousCowardName.class),
}) {
rebind(parent, k);
}
for (Class<?> c :
new Class<?>[] {
AllProjectsName.class,
AllUsersCreator.class,
AllUsersName.class,
GitRepositoryManager.class,
SitePaths.class,
SystemGroupBackend.class,
}) {
rebind(parent, Key.get(c));
}
}
private <T> void rebind(Injector parent, Key<T> c) {
bind(c).toProvider(parent.getProvider(c));
}
});
=======
return Guice.createInjector(Stage.DEVELOPMENT, new AbstractModule() {
@Override
protected void configure() {
bind(SchemaVersion.class).to(SchemaVersion.C);
for (Key<?> k : new Key<?>[]{
Key.get(PersonIdent.class, GerritPersonIdent.class),
Key.get(String.class, AnonymousCowardName.class),
Key.get(Config.class, GerritServerConfig.class),
}) {
rebind(parent, k);
}
for (Class<?> c : new Class<?>[] {
AllProjectsName.class,
AllUsersCreator.class,
AllUsersName.class,
GitRepositoryManager.class,
SitePaths.class,
}) {
rebind(parent, Key.get(c));
}
}
private <T> void rebind(Injector parent, Key<T> c) {
bind(c).toProvider(parent.getProvider(c));
}
});
>>>>>>>
return Guice.createInjector(
Stage.DEVELOPMENT,
new AbstractModule() {
@Override
protected void configure() {
bind(SchemaVersion.class).to(SchemaVersion.C);
for (Key<?> k :
new Key<?>[] {
Key.get(PersonIdent.class, GerritPersonIdent.class),
Key.get(String.class, AnonymousCowardName.class),
Key.get(Config.class, GerritServerConfig.class),
}) {
rebind(parent, k);
}
for (Class<?> c :
new Class<?>[] {
AllProjectsName.class,
AllUsersCreator.class,
AllUsersName.class,
GitRepositoryManager.class,
SitePaths.class,
SystemGroupBackend.class,
}) {
rebind(parent, Key.get(c));
}
}
private <T> void rebind(Injector parent, Key<T> c) {
bind(c).toProvider(parent.getProvider(c));
}
}); |
<<<<<<<
=======
import java.util.Date;
import java.util.HashMap;
>>>>>>>
import java.util.HashMap; |
<<<<<<<
=======
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
>>>>>>>
import com.google.common.primitives.Ints;
<<<<<<<
=======
import java.io.IOException;
import java.util.Collections;
>>>>>>>
import java.io.IOException;
<<<<<<<
ChangeUtil changeUtil,
CreateChange createChange) {
=======
CreateChange createChange,
ChangeIndexer changeIndexer) {
this.db = dbProvider;
>>>>>>>
ChangeUtil changeUtil,
CreateChange createChange,
ChangeIndexer changeIndexer) {
<<<<<<<
List<Change> changes = changeUtil.findChanges(id.encoded());
=======
List<Change> changes = findChanges(id.encoded());
if (changes.isEmpty()) {
Integer changeId = Ints.tryParse(id.get());
if (changeId != null) {
try {
changeIndexer.delete(changeId);
} catch (IOException e) {
throw new ResourceNotFoundException(id.get(), e);
}
}
}
>>>>>>>
List<Change> changes = changeUtil.findChanges(id.encoded());
if (changes.isEmpty()) {
Integer changeId = Ints.tryParse(id.get());
if (changeId != null) {
try {
changeIndexer.delete(changeId);
} catch (IOException e) {
throw new ResourceNotFoundException(id.get(), e);
}
}
} |
<<<<<<<
private void setCreatedOnToNull(AccountGroup.Id groupId) throws Exception {
AccountGroup group = db.accountGroups().get(groupId);
group.setCreatedOn(null);
db.accountGroups().update(ImmutableList.of(group));
groupCache.evict(group);
}
=======
private void assertBadRequest(Groups.ListRequest req) throws Exception {
try {
req.get();
fail("Expected BadRequestException");
} catch (BadRequestException e) {
// Expected
}
}
>>>>>>>
private void setCreatedOnToNull(AccountGroup.Id groupId) throws Exception {
AccountGroup group = db.accountGroups().get(groupId);
group.setCreatedOn(null);
db.accountGroups().update(ImmutableList.of(group));
groupCache.evict(group);
}
private void assertBadRequest(Groups.ListRequest req) throws Exception {
try {
req.get();
fail("Expected BadRequestException");
} catch (BadRequestException e) {
// Expected
}
} |
<<<<<<<
import com.quorum.tessera.partyinfo.*;
import com.quorum.tessera.recover.resend.BatchResendManager;
=======
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.partyinfo.ResendRequest;
import com.quorum.tessera.partyinfo.ResendResponse;
>>>>>>>
import com.quorum.tessera.enclave.EncodedPayload;
import com.quorum.tessera.enclave.PayloadEncoder;
import com.quorum.tessera.partyinfo.ResendRequest;
import com.quorum.tessera.partyinfo.ResendResponse;
import com.quorum.tessera.partyinfo.*;
import com.quorum.tessera.recover.resend.BatchResendManager;
<<<<<<<
private BatchResendManager batchResendManager;
=======
private PayloadEncoder payloadEncoder;
>>>>>>>
private BatchResendManager batchResendManager;
private PayloadEncoder payloadEncoder;
<<<<<<<
transactionManager = mock(TransactionManager.class);
batchResendManager = mock(BatchResendManager.class);
transactionResource = new TransactionResource(transactionManager, batchResendManager);
=======
this.transactionManager = mock(TransactionManager.class);
this.payloadEncoder = mock(PayloadEncoder.class);
this.transactionResource = new TransactionResource(transactionManager, payloadEncoder);
>>>>>>>
transactionManager = mock(TransactionManager.class);
batchResendManager = mock(BatchResendManager.class);
transactionResource = new TransactionResource(transactionManager, batchResendManager);
this.transactionManager = mock(TransactionManager.class);
this.payloadEncoder = mock(PayloadEncoder.class);
this.transactionResource = new TransactionResource(transactionManager, payloadEncoder); |
<<<<<<<
import com.google.common.collect.Iterables;
=======
import com.google.common.collect.Lists;
>>>>>>>
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
<<<<<<<
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
=======
import com.google.gerrit.reviewdb.client.PatchSetApproval;
>>>>>>>
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
<<<<<<<
import com.google.gwtorm.server.OrmException;
=======
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.SchemaFactory;
>>>>>>>
import com.google.gwtorm.server.OrmException; |
<<<<<<<
@Test
public void testStore() {
EncodedPayloadWithRecipients payload = mock(EncodedPayloadWithRecipients.class);
when(txService.encryptPayload(any(), any(), any())).thenReturn(payload);
enclave.store(new byte[0], new byte[0][0], new byte[0]);
verify(txService).encryptPayload(any(), any(), any());
verify(txService).storeEncodedPayload(payload);
}
@Test
  public void testStoreWithRecipientStuff() {
EncodedPayloadWithRecipients payload = mock(EncodedPayloadWithRecipients.class);
when(txService.encryptPayload(any(), any(), any())).thenReturn(payload);
byte[][] recipients = new byte[1][1];
recipients[0] = new byte[] {'P'};
    enclave.store(new byte[0], recipients, new byte[0]);
verify(txService).encryptPayload(any(), any(), any());
verify(txService).storeEncodedPayload(payload);
}
=======
>>>>>>>
@Test
public void testStore() {
EncodedPayloadWithRecipients payload = mock(EncodedPayloadWithRecipients.class);
when(txService.encryptPayload(any(), any(), any())).thenReturn(payload);
enclave.store(new byte[0], new byte[0][0], new byte[0]);
verify(txService).encryptPayload(any(), any(), any());
verify(txService).storeEncodedPayload(payload);
}
@Test
  public void testStoreWithRecipientStuff() {
EncodedPayloadWithRecipients payload = mock(EncodedPayloadWithRecipients.class);
when(txService.encryptPayload(any(), any(), any())).thenReturn(payload);
byte[][] recipients = new byte[1][1];
recipients[0] = new byte[] {'P'};
    enclave.store(new byte[0], recipients, new byte[0]);
verify(txService).encryptPayload(any(), any(), any());
verify(txService).storeEncodedPayload(payload);
} |
<<<<<<<
baseConfig.setInt("receive", null, "changeUpdateThreads", 4);
if (classDesc.equals(methodDesc)) {
=======
if (classDesc.equals(methodDesc) && !classDesc.sandboxed() &&
!methodDesc.sandboxed()) {
>>>>>>>
baseConfig.setInt("receive", null, "changeUpdateThreads", 4);
if (classDesc.equals(methodDesc) && !classDesc.sandboxed() &&
!methodDesc.sandboxed()) { |
<<<<<<<
import com.google.common.base.CharMatcher;
import com.google.common.base.Joiner;
=======
import com.google.common.base.Objects;
>>>>>>>
import com.google.common.base.CharMatcher;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
<<<<<<<
import com.google.gerrit.server.config.AllProjectsNameProvider;
import com.google.gerrit.server.config.PluginConfig;
import com.google.gerrit.server.config.PluginConfigFactory;
import com.google.gerrit.server.config.ProjectConfigEntry;
import com.google.gerrit.server.git.GitRepositoryManager;
=======
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.GitRepositoryManager;
>>>>>>>
import com.google.gerrit.server.config.AllProjectsNameProvider;
import com.google.gerrit.server.config.PluginConfig;
import com.google.gerrit.server.config.PluginConfigFactory;
import com.google.gerrit.server.config.ProjectConfigEntry;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.GitRepositoryManager;
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
import org.eclipse.jgit.lib.ObjectId;
>>>>>>>
import org.eclipse.jgit.lib.ObjectId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
projectConfig.commit(md);
projectCache.evict(projectConfig.getProject());
gitMgr.setProjectDescription(projectName, p.getDescription());
=======
ObjectId baseRev = projectConfig.getRevision();
ObjectId commitRev = projectConfig.commit(md);
// Only fire hook if project was actually changed.
if (!Objects.equal(baseRev, commitRev)) {
IdentifiedUser user = (IdentifiedUser) currentUser.get();
hooks.doRefUpdatedHook(
new Branch.NameKey(projectName, GitRepositoryManager.REF_CONFIG),
baseRev, commitRev, user.getAccount());
};
(new PerRequestProjectControlCache(projectCache, self.get()))
.evict(projectConfig.getProject());
>>>>>>>
ObjectId baseRev = projectConfig.getRevision();
ObjectId commitRev = projectConfig.commit(md);
// Only fire hook if project was actually changed.
if (!Objects.equal(baseRev, commitRev)) {
IdentifiedUser user = (IdentifiedUser) currentUser.get();
hooks.doRefUpdatedHook(
new Branch.NameKey(projectName, RefNames.REFS_CONFIG),
baseRev, commitRev, user.getAccount());
};
projectCache.evict(projectConfig.getProject());
gitMgr.setProjectDescription(projectName, p.getDescription()); |
<<<<<<<
import com.quorum.tessera.api.constraint.PrivacyValid;
import com.quorum.tessera.config.constraints.ValidBase64;
=======
>>>>>>>
import com.quorum.tessera.api.constraint.PrivacyValid;
import com.quorum.tessera.config.constraints.ValidBase64;
<<<<<<<
public Response send(
@ApiParam(name = "sendRequest", required = true) @NotNull @Valid @PrivacyValid
final SendRequest sendRequest)
=======
public Response send(
@ApiParam(name = "sendRequest", required = true) @NotNull @Valid
final SendRequest sendRequest)
>>>>>>>
public Response send(
@ApiParam(name = "sendRequest", required = true) @NotNull @Valid @PrivacyValid
final SendRequest sendRequest)
<<<<<<<
final PublicKey sender =
Optional.ofNullable(sendRequest.getFrom())
.map(base64Decoder::decode)
.map(PublicKey::from)
.orElseGet(transactionManager::defaultPublicKey);
final List<PublicKey> recipientList =
Stream.of(sendRequest)
.filter(sr -> Objects.nonNull(sr.getTo()))
.flatMap(s -> Stream.of(s.getTo()))
.map(base64Decoder::decode)
.map(PublicKey::from)
.collect(Collectors.toList());
final Set<MessageHash> affectedTransactions =
Stream.ofNullable(sendRequest.getAffectedContractTransactions())
.flatMap(Arrays::stream)
.map(Base64.getDecoder()::decode)
.map(MessageHash::new)
.collect(Collectors.toSet());
final byte[] execHash =
Optional.ofNullable(sendRequest.getExecHash()).map(String::getBytes).orElse(new byte[0]);
final PrivacyMode privacyMode = PrivacyMode.fromFlag(sendRequest.getPrivacyFlag());
final com.quorum.tessera.transaction.SendRequest request =
com.quorum.tessera.transaction.SendRequest.Builder.create()
.withRecipients(recipientList)
.withSender(sender)
.withPayload(sendRequest.getPayload())
.withExecHash(execHash)
.withPrivacyMode(privacyMode)
.withAffectedContractTransactions(affectedTransactions)
.build();
=======
PublicKey sender = Optional.ofNullable(sendRequest.getFrom())
.map(base64Decoder::decode)
.map(PublicKey::from)
.orElseGet(transactionManager::defaultPublicKey);
final List<PublicKey> recipientList =
Stream.of(sendRequest)
.filter(sr -> Objects.nonNull(sr.getTo()))
.flatMap(s -> Stream.of(s.getTo()))
.map(base64Decoder::decode)
.map(PublicKey::from)
.collect(Collectors.toList());
com.quorum.tessera.transaction.SendRequest request = com.quorum.tessera.transaction.SendRequest.Builder.create()
.withRecipients(recipientList)
.withSender(sender)
.withPayload(sendRequest.getPayload())
.build();
>>>>>>>
PublicKey sender = Optional.ofNullable(sendRequest.getFrom())
.map(base64Decoder::decode)
.map(PublicKey::from)
.orElseGet(transactionManager::defaultPublicKey);
final List<PublicKey> recipientList =
Stream.of(sendRequest)
.filter(sr -> Objects.nonNull(sr.getTo()))
.flatMap(s -> Stream.of(s.getTo()))
.map(base64Decoder::decode)
.map(PublicKey::from)
.collect(Collectors.toList());
final Set<MessageHash> affectedTransactions =
Stream.ofNullable(sendRequest.getAffectedContractTransactions())
.flatMap(Arrays::stream)
.map(Base64.getDecoder()::decode)
.map(MessageHash::new)
.collect(Collectors.toSet());
final byte[] execHash =
Optional.ofNullable(sendRequest.getExecHash()).map(String::getBytes).orElse(new byte[0]);
final PrivacyMode privacyMode = PrivacyMode.fromFlag(sendRequest.getPrivacyFlag());
final com.quorum.tessera.transaction.SendRequest request =
com.quorum.tessera.transaction.SendRequest.Builder.create()
.withRecipients(recipientList)
.withSender(sender)
.withPayload(sendRequest.getPayload())
.withExecHash(execHash)
.withPrivacyMode(privacyMode)
.withAffectedContractTransactions(affectedTransactions)
.build(); |
<<<<<<<
import static com.google.gerrit.server.mail.MailUtil.getRecipientsFromReviewers;
=======
>>>>>>>
import static com.google.gerrit.server.mail.MailUtil.getRecipientsFromReviewers;
<<<<<<<
reject(inputCommand, "commit already exists");
=======
reject(inputCommand, "commit already exists (as current patchset)");
>>>>>>>
reject(inputCommand, "commit already exists (as current patchset)"); |
<<<<<<<
=======
import static org.eclipse.jgit.lib.Constants.R_HEADS;
import com.google.gerrit.extensions.restapi.AuthException;
>>>>>>>
import static org.eclipse.jgit.lib.Constants.R_HEADS; |
<<<<<<<
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.extensions.api.changes.ReviewInput.NotifyHandling;
=======
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.extensions.api.projects.BranchInput;
>>>>>>>
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.extensions.api.changes.ReviewInput.NotifyHandling;
import com.google.gerrit.extensions.api.projects.BranchInput;
<<<<<<<
=======
import org.eclipse.jgit.transport.PushResult;
import org.joda.time.DateTime;
import org.joda.time.DateTimeUtils;
import org.joda.time.DateTimeUtils.MillisProvider;
>>>>>>>
import org.eclipse.jgit.transport.PushResult;
<<<<<<<
import java.util.Collection;
=======
import java.util.List;
>>>>>>>
import java.util.Collection;
import java.util.List; |
<<<<<<<
RefControl refControl) throws InvalidChangeOperationException,
IOException, OrmException, NoSuchChangeException {
final ChangeControl changeControl =
refControl.getProjectControl().controlFor(change);
=======
RefControl refControl, IdentifiedUser uploader)
throws InvalidChangeOperationException, IOException, OrmException,
NoSuchChangeException {
>>>>>>>
RefControl refControl, IdentifiedUser uploader)
throws InvalidChangeOperationException, IOException, OrmException,
NoSuchChangeException {
final ChangeControl changeControl =
refControl.getProjectControl().controlFor(change); |
<<<<<<<
@UiHandler("addme")
void onAddMe(@SuppressWarnings("unused") ClickEvent e) {
=======
@UiHandler("addMe")
void onAddMe(ClickEvent e) {
>>>>>>>
@UiHandler("addMe")
void onAddMe(@SuppressWarnings("unused") ClickEvent e) { |
<<<<<<<
import com.google.inject.Singleton;
=======
import com.google.inject.Provider;
>>>>>>>
import com.google.inject.Provider;
import com.google.inject.Singleton; |
<<<<<<<
=======
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.UncheckedExecutionException;
>>>>>>>
import com.google.common.util.concurrent.UncheckedExecutionException;
<<<<<<<
return cache.get(key, new Loader(key, dest, repo));
} catch (ExecutionException e) {
=======
return cache.get(key, new Loader(key, dest, repo, db));
} catch (ExecutionException | UncheckedExecutionException e) {
>>>>>>>
return cache.get(key, new Loader(key, dest, repo));
} catch (ExecutionException | UncheckedExecutionException e) { |
<<<<<<<
eventFactory.addPatchSets(db, rw, c, d.patchSets(),
=======
eventFactory.addPatchSets(db.get(), rw, c, d.visiblePatchSets(),
>>>>>>>
eventFactory.addPatchSets(db, rw, c, d.visiblePatchSets(),
<<<<<<<
eventFactory.addPatchSets(db, rw, c, d.patchSets(),
=======
eventFactory.addPatchSets(db.get(), rw, c, d.visiblePatchSets(),
>>>>>>>
eventFactory.addPatchSets(db, rw, c, d.visiblePatchSets(), |
<<<<<<<
String workdir();
String socket();
=======
Set<String> whitelist();
>>>>>>>
Set<String> whitelist();
String workdir();
String socket(); |
<<<<<<<
if (!isAdmin) {
GroupControl c = groupControlFactory.controlFor(group);
if (!c.isVisible()) {
continue;
}
}
=======
>>>>>>> |
<<<<<<<
import com.google.common.collect.Iterables;
=======
import static com.google.common.base.Preconditions.checkNotNull;
>>>>>>>
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.Iterables; |
<<<<<<<
=======
if (input.email != null && !email.equals(input.email)) {
throw new BadRequestException("email address must match URL");
}
if (!EmailValidator.getInstance().isValid(email)) {
throw new BadRequestException("invalid email address");
}
>>>>>>>
if (!EmailValidator.getInstance().isValid(email)) {
throw new BadRequestException("invalid email address");
} |
<<<<<<<
public ReviewResult review(ReviewInput in) {
=======
public void review(ReviewInput in) throws RestApiException {
>>>>>>>
public ReviewResult review(ReviewInput in) throws RestApiException {
<<<<<<<
public CommitInfo commit(boolean addLinks) {
throw new NotImplementedException();
}
@Override
public Map<String, List<CommentInfo>> comments() {
=======
public Map<String, List<CommentInfo>> comments() throws RestApiException {
>>>>>>>
public CommitInfo commit(boolean addLinks) throws RestApiException {
throw new NotImplementedException();
}
@Override
public Map<String, List<CommentInfo>> comments() throws RestApiException {
<<<<<<<
public EditInfo applyFix(String fixId) {
throw new NotImplementedException();
}
@Override
public Map<String, List<CommentInfo>> drafts() {
=======
public Map<String, List<CommentInfo>> drafts() throws RestApiException {
>>>>>>>
public EditInfo applyFix(String fixId) throws RestApiException {
throw new NotImplementedException();
}
@Override
public Map<String, List<CommentInfo>> drafts() throws RestApiException { |
<<<<<<<
@Override
public String workdir() { return "qdata"; };
@Override
public String socket() { return "/tmp/tst1.ipc"; };
=======
@Override
public Set<String> whitelist() {
return emptySet();
}
>>>>>>>
@Override
public Set<String> whitelist() {
return emptySet();
}
@Override
public String workdir() { return "qdata"; };
@Override
public String socket() { return "/tmp/tst1.ipc"; }; |
<<<<<<<
=======
import com.google.gerrit.common.data.GroupReference;
import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.extensions.api.changes.CherryPickInput;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.extensions.api.changes.SubmitInput;
import com.google.gerrit.extensions.api.groups.GroupApi;
import com.google.gerrit.extensions.api.projects.BranchInfo;
import com.google.gerrit.extensions.api.projects.BranchInput;
import com.google.gerrit.extensions.client.InheritableBoolean;
>>>>>>>
import com.google.gerrit.extensions.api.changes.CherryPickInput;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.extensions.api.changes.SubmitInput;
import com.google.gerrit.extensions.api.projects.BranchInfo;
import com.google.gerrit.extensions.api.projects.BranchInput;
import com.google.gerrit.extensions.client.InheritableBoolean;
<<<<<<<
import com.google.gerrit.extensions.common.ServerInfo;
=======
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.common.ChangeInput;
import com.google.gerrit.extensions.restapi.AuthException;
>>>>>>>
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.common.ChangeInput;
import com.google.gerrit.extensions.common.ServerInfo;
import com.google.gerrit.extensions.restapi.AuthException;
<<<<<<<
private void assertAgreement(AgreementInfo info, ContributorAgreement ca) {
assertThat(info.name).isEqualTo(ca.getName());
assertThat(info.description).isEqualTo(ca.getDescription());
assertThat(info.url).isEqualTo(ca.getAgreementUrl());
if (ca.getAutoVerify() != null) {
assertThat(info.autoVerifyGroup.name)
.isEqualTo(ca.getAutoVerify().getName());
} else {
assertThat(info.autoVerifyGroup).isNull();
}
}
=======
@Test
public void revertChangeWithoutCLA() throws Exception {
assume().that(isContributorAgreementsEnabled()).isTrue();
// Create a change succeeds when agreement is not required
setUseContributorAgreements(InheritableBoolean.FALSE);
ChangeInfo change = gApi.changes().create(newChangeInput()).get();
// Approve and submit it
setApiUser(admin);
gApi.changes().id(change.changeId).current().review(ReviewInput.approve());
gApi.changes().id(change.changeId).current().submit(new SubmitInput());
// Revert is not allowed when CLA is required but not signed
setApiUser(user);
setUseContributorAgreements(InheritableBoolean.TRUE);
exception.expect(AuthException.class);
exception.expectMessage("A Contributor Agreement must be completed");
gApi.changes().id(change.changeId).revert();
}
@Test
public void cherrypickChangeWithoutCLA() throws Exception {
assume().that(isContributorAgreementsEnabled()).isTrue();
// Create a new branch
setApiUser(admin);
BranchInfo dest = gApi.projects().name(project.get())
.branch("cherry-pick-to").create(new BranchInput()).get();
// Create a change succeeds when agreement is not required
setUseContributorAgreements(InheritableBoolean.FALSE);
ChangeInfo change = gApi.changes().create(newChangeInput()).get();
// Approve and submit it
gApi.changes().id(change.changeId).current().review(ReviewInput.approve());
gApi.changes().id(change.changeId).current().submit(new SubmitInput());
// Cherry-pick is not allowed when CLA is required but not signed
setApiUser(user);
setUseContributorAgreements(InheritableBoolean.TRUE);
CherryPickInput in = new CherryPickInput();
in.destination = dest.ref;
in.message = change.subject;
exception.expect(AuthException.class);
exception.expectMessage("A Contributor Agreement must be completed");
gApi.changes().id(change.changeId).current().cherryPick(in);
}
@Test
public void createChangeWithoutCLA() throws Exception {
assume().that(isContributorAgreementsEnabled()).isTrue();
// Create a change succeeds when agreement is not required
setUseContributorAgreements(InheritableBoolean.FALSE);
gApi.changes().create(newChangeInput());
// Create a change is not allowed when CLA is required but not signed
setUseContributorAgreements(InheritableBoolean.TRUE);
exception.expect(AuthException.class);
exception.expectMessage("A Contributor Agreement must be completed");
gApi.changes().create(newChangeInput());
}
private ChangeInput newChangeInput() {
ChangeInput in = new ChangeInput();
in.branch = "master";
in.subject = "test";
in.project = project.get();
return in;
}
>>>>>>>
@Test
public void revertChangeWithoutCLA() throws Exception {
assume().that(isContributorAgreementsEnabled()).isTrue();
// Create a change succeeds when agreement is not required
setUseContributorAgreements(InheritableBoolean.FALSE);
ChangeInfo change = gApi.changes().create(newChangeInput()).get();
// Approve and submit it
setApiUser(admin);
gApi.changes().id(change.changeId).current().review(ReviewInput.approve());
gApi.changes().id(change.changeId).current().submit(new SubmitInput());
// Revert is not allowed when CLA is required but not signed
setApiUser(user);
setUseContributorAgreements(InheritableBoolean.TRUE);
exception.expect(AuthException.class);
exception.expectMessage("A Contributor Agreement must be completed");
gApi.changes().id(change.changeId).revert();
}
@Test
public void cherrypickChangeWithoutCLA() throws Exception {
assume().that(isContributorAgreementsEnabled()).isTrue();
// Create a new branch
setApiUser(admin);
BranchInfo dest = gApi.projects().name(project.get())
.branch("cherry-pick-to").create(new BranchInput()).get();
// Create a change succeeds when agreement is not required
setUseContributorAgreements(InheritableBoolean.FALSE);
ChangeInfo change = gApi.changes().create(newChangeInput()).get();
// Approve and submit it
gApi.changes().id(change.changeId).current().review(ReviewInput.approve());
gApi.changes().id(change.changeId).current().submit(new SubmitInput());
// Cherry-pick is not allowed when CLA is required but not signed
setApiUser(user);
setUseContributorAgreements(InheritableBoolean.TRUE);
CherryPickInput in = new CherryPickInput();
in.destination = dest.ref;
in.message = change.subject;
exception.expect(AuthException.class);
exception.expectMessage("A Contributor Agreement must be completed");
gApi.changes().id(change.changeId).current().cherryPick(in);
}
@Test
public void createChangeWithoutCLA() throws Exception {
assume().that(isContributorAgreementsEnabled()).isTrue();
// Create a change succeeds when agreement is not required
setUseContributorAgreements(InheritableBoolean.FALSE);
gApi.changes().create(newChangeInput());
// Create a change is not allowed when CLA is required but not signed
setUseContributorAgreements(InheritableBoolean.TRUE);
exception.expect(AuthException.class);
exception.expectMessage("A Contributor Agreement must be completed");
gApi.changes().create(newChangeInput());
}
private void assertAgreement(AgreementInfo info, ContributorAgreement ca) {
assertThat(info.name).isEqualTo(ca.getName());
assertThat(info.description).isEqualTo(ca.getDescription());
assertThat(info.url).isEqualTo(ca.getAgreementUrl());
if (ca.getAutoVerify() != null) {
assertThat(info.autoVerifyGroup.name)
.isEqualTo(ca.getAutoVerify().getName());
} else {
assertThat(info.autoVerifyGroup).isNull();
}
}
private ChangeInput newChangeInput() {
ChangeInput in = new ChangeInput();
in.branch = "master";
in.subject = "test";
in.project = project.get();
return in;
} |
<<<<<<<
@UiField ToggleButton skipDeleted;
@UiField ToggleButton skipUnchanged;
@UiField ToggleButton skipUncommented;
=======
@UiField ToggleButton lineWrapping;
>>>>>>>
@UiField ToggleButton lineWrapping;
@UiField ToggleButton skipDeleted;
@UiField ToggleButton skipUnchanged;
@UiField ToggleButton skipUncommented;
<<<<<<<
skipDeleted.setValue(!prefs.skipDeleted());
skipUnchanged.setValue(!prefs.skipUnchanged());
skipUncommented.setValue(!prefs.skipUncommented());
=======
lineWrapping.setValue(prefs.lineWrapping());
>>>>>>>
lineWrapping.setValue(prefs.lineWrapping());
skipDeleted.setValue(!prefs.skipDeleted());
skipUnchanged.setValue(!prefs.skipUnchanged());
skipUncommented.setValue(!prefs.skipUncommented());
<<<<<<<
@UiHandler("skipDeleted")
void onSkipDeleted(ValueChangeEvent<Boolean> e) {
prefs.skipDeleted(!e.getValue());
// TODO: Update the navigation links on the current DiffScreen
}
@UiHandler("skipUnchanged")
void onSkipUnchanged(ValueChangeEvent<Boolean> e) {
prefs.skipUnchanged(!e.getValue());
// TODO: Update the navigation links on the current DiffScreen
}
@UiHandler("skipUncommented")
void onSkipUncommented(ValueChangeEvent<Boolean> e) {
prefs.skipUncommented(!e.getValue());
// TODO: Update the navigation links on the current DiffScreen
}
=======
@UiHandler("lineWrapping")
void onLineWrapping(ValueChangeEvent<Boolean> e) {
prefs.lineWrapping(e.getValue());
view.getCmFromSide(DisplaySide.A).setOption("lineWrapping",
prefs.lineWrapping());
view.getCmFromSide(DisplaySide.B).setOption("lineWrapping",
prefs.lineWrapping());
}
>>>>>>>
@UiHandler("lineWrapping")
void onLineWrapping(ValueChangeEvent<Boolean> e) {
prefs.lineWrapping(e.getValue());
view.getCmFromSide(DisplaySide.A).setOption("lineWrapping",
prefs.lineWrapping());
view.getCmFromSide(DisplaySide.B).setOption("lineWrapping",
prefs.lineWrapping());
}
@UiHandler("skipDeleted")
void onSkipDeleted(ValueChangeEvent<Boolean> e) {
prefs.skipDeleted(!e.getValue());
// TODO: Update the navigation links on the current DiffScreen
}
@UiHandler("skipUnchanged")
void onSkipUnchanged(ValueChangeEvent<Boolean> e) {
prefs.skipUnchanged(!e.getValue());
// TODO: Update the navigation links on the current DiffScreen
}
@UiHandler("skipUncommented")
void onSkipUncommented(ValueChangeEvent<Boolean> e) {
prefs.skipUncommented(!e.getValue());
// TODO: Update the navigation links on the current DiffScreen
} |
<<<<<<<
import com.google.gerrit.extensions.api.groups.GroupInput;
import com.google.gerrit.extensions.api.projects.BranchApi;
import com.google.gerrit.extensions.api.projects.BranchInput;
=======
import com.google.gerrit.extensions.api.projects.BranchApi;
import com.google.gerrit.extensions.api.projects.BranchInput;
>>>>>>>
import com.google.gerrit.extensions.api.groups.GroupInput;
import com.google.gerrit.extensions.api.projects.BranchApi;
import com.google.gerrit.extensions.api.projects.BranchInput;
<<<<<<<
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.PatchSet;
=======
import com.google.gerrit.reviewdb.client.Branch;
>>>>>>>
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.PatchSet;
<<<<<<<
protected PushOneCommit.Result createDraftChange() throws Exception {
return pushTo("refs/drafts/master");
}
protected BranchApi createBranch(Branch.NameKey branch) throws Exception {
return gApi.projects()
.name(branch.getParentKey().get())
.branch(branch.get())
.create(new BranchInput());
}
=======
protected BranchApi createBranchWithRevision(Branch.NameKey branch,
String revision) throws Exception {
BranchInput in = new BranchInput();
in.revision = revision;
return gApi.projects()
.name(branch.getParentKey().get())
.branch(branch.get())
.create(in);
}
>>>>>>>
protected PushOneCommit.Result createDraftChange() throws Exception {
return pushTo("refs/drafts/master");
}
protected BranchApi createBranch(Branch.NameKey branch) throws Exception {
return gApi.projects()
.name(branch.getParentKey().get())
.branch(branch.get())
.create(new BranchInput());
}
protected BranchApi createBranchWithRevision(Branch.NameKey branch,
String revision) throws Exception {
BranchInput in = new BranchInput();
in.revision = revision;
return gApi.projects()
.name(branch.getParentKey().get())
.branch(branch.get())
.create(in);
} |
<<<<<<<
c = ChangeNotes.readOneReviewDbChange(db, id);
=======
c = ReviewDbUtil.unwrapDb(db).changes().get(id);
if (c == null) {
logDebug("Failed to get change {} from unwrapped db", id);
throw new NoSuchChangeException(id);
}
>>>>>>>
c = ChangeNotes.readOneReviewDbChange(db, id);
if (c == null) {
logDebug("Failed to get change {} from unwrapped db", id);
throw new NoSuchChangeException(id);
} |
<<<<<<<
// This is a hack to detect an operator that requires authentication.
Pattern p =
Pattern.compile("^Error in operator (.*:self|is:watched|is:owner|is:reviewer|has:.*)$");
Matcher m = p.matcher(e.getMessage());
if (m.matches()) {
String op = m.group(1);
throw new AuthException("Must be signed-in to use " + op);
}
log.debug("Reject change query with 400 Bad Request: " + queries, e);
=======
>>>>>>>
log.debug("Reject change query with 400 Bad Request: " + queries, e); |
<<<<<<<
matches.add(id);
=======
if (visibilityControl.isVisibleTo(id)) {
matches.put(id, accountLoader.get(id));
}
>>>>>>>
if (visibilityControl.isVisibleTo(id)) {
matches.add(id);
} |
<<<<<<<
protected void merge(PushOneCommit.Result r) throws Exception {
revision(r).review(ReviewInput.approve());
revision(r).submit();
}
=======
protected PushOneCommit.Result amendChangeAsDraft(String changeId)
throws Exception {
return amendChange(changeId, "refs/drafts/master");
}
>>>>>>>
protected void merge(PushOneCommit.Result r) throws Exception {
revision(r).review(ReviewInput.approve());
revision(r).submit();
}
protected PushOneCommit.Result amendChangeAsDraft(String changeId)
throws Exception {
return amendChange(changeId, "refs/drafts/master");
} |
<<<<<<<
=======
import com.google.gerrit.acceptance.RestResponse;
import com.google.gerrit.acceptance.Sandboxed;
>>>>>>>
<<<<<<<
=======
import com.google.gerrit.extensions.api.projects.BranchInfo;
import com.google.gerrit.extensions.api.projects.BranchInput;
>>>>>>>
import com.google.gerrit.extensions.api.projects.BranchInput;
<<<<<<<
@NoHttpd
=======
@Sandboxed
>>>>>>>
@NoHttpd
<<<<<<<
String expectedTopic = name(topic);
change1.assertChange(Change.Status.MERGED, expectedTopic, admin);
change2.assertChange(Change.Status.MERGED, expectedTopic, admin);
change3.assertChange(Change.Status.MERGED, expectedTopic, admin);
=======
change1.assertChange(Change.Status.MERGED, topic, admin);
change2.assertChange(Change.Status.MERGED, topic, admin);
change3.assertChange(Change.Status.MERGED, topic, admin);
>>>>>>>
String expectedTopic = name(topic);
change1.assertChange(Change.Status.MERGED, expectedTopic, admin);
change2.assertChange(Change.Status.MERGED, expectedTopic, admin);
change3.assertChange(Change.Status.MERGED, expectedTopic, admin);
<<<<<<<
"Merge changes from topic '" + expectedTopic + "'");
=======
"Merge changes from topic '" + topic + "'");
>>>>>>>
"Merge changes from topic '" + expectedTopic + "'"); |
<<<<<<<
batchRefUpdate = repoView.getRepository().getRefDatabase().newBatchUpdate();
batchRefUpdate.setPushCertificate(pushCert);
batchRefUpdate.setRefLogMessage(refLogMessage, true);
batchRefUpdate.setAllowNonFastForwards(true);
repoView.getCommands().addTo(batchRefUpdate);
=======
batchRefUpdate = repo.getRefDatabase().newBatchUpdate();
batchRefUpdate.setRefLogMessage(refLogMessage, true);
if (user.isIdentifiedUser()) {
batchRefUpdate.setRefLogIdent(user.asIdentifiedUser().newRefLogIdent(when, tz));
}
commands.addTo(batchRefUpdate);
>>>>>>>
batchRefUpdate = repoView.getRepository().getRefDatabase().newBatchUpdate();
batchRefUpdate.setPushCertificate(pushCert);
batchRefUpdate.setRefLogMessage(refLogMessage, true);
batchRefUpdate.setAllowNonFastForwards(true);
repoView.getCommands().addTo(batchRefUpdate);
if (user.isIdentifiedUser()) {
batchRefUpdate.setRefLogIdent(user.asIdentifiedUser().newRefLogIdent(when, tz));
} |
<<<<<<<
CodeReviewRevWalk revWalk = CodeReviewCommit.newRevWalk(oi.newReader())) {
Ref destRef = git.getRefDatabase().exactRef(targetRef);
=======
ObjectReader reader = oi.newReader();
CodeReviewRevWalk revWalk = CodeReviewCommit.newRevWalk(reader)) {
Ref destRef = git.getRefDatabase().exactRef(ref);
>>>>>>>
ObjectReader reader = oi.newReader();
CodeReviewRevWalk revWalk = CodeReviewCommit.newRevWalk(reader)) {
Ref destRef = git.getRefDatabase().exactRef(targetRef); |
<<<<<<<
private final IdentifiedUser.GenericFactory identifiedUserFactory;
private final AgreementJson agreementJson;
=======
>>>>>>>
private final AgreementJson agreementJson;
<<<<<<<
IdentifiedUser.GenericFactory identifiedUserFactory,
AgreementJson agreementJson,
=======
>>>>>>>
AgreementJson agreementJson,
<<<<<<<
this.identifiedUserFactory = identifiedUserFactory;
this.agreementJson = agreementJson;
=======
>>>>>>>
this.agreementJson = agreementJson; |
<<<<<<<
=======
import com.google.gerrit.reviewdb.RefRight;
import com.google.gerrit.reviewdb.SystemConfig;
>>>>>>>
<<<<<<<
import java.util.Collection;
=======
import java.util.Collections;
>>>>>>>
import java.util.Collection;
import java.util.Collections;
<<<<<<<
private final Set<AccountGroup.UUID> uploadGroups;
private final Set<AccountGroup.UUID> receiveGroups;
=======
private final SystemConfig systemConfig;
private final Set<AccountGroup.Id> uploadGroups;
private final Set<AccountGroup.Id> receiveGroups;
>>>>>>>
private final Set<AccountGroup.UUID> uploadGroups;
private final Set<AccountGroup.UUID> receiveGroups;
<<<<<<<
ProjectControl(@GitUploadPackGroups Set<AccountGroup.UUID> uploadGroups,
@GitReceivePackGroups Set<AccountGroup.UUID> receiveGroups,
=======
ProjectControl(final SystemConfig systemConfig,
@GitUploadPackGroups Set<AccountGroup.Id> uploadGroups,
@GitReceivePackGroups Set<AccountGroup.Id> receiveGroups,
>>>>>>>
ProjectControl(@GitUploadPackGroups Set<AccountGroup.UUID> uploadGroups,
@GitReceivePackGroups Set<AccountGroup.UUID> receiveGroups,
<<<<<<<
private boolean canPerformOnAnyRef(String permissionName) {
final Set<AccountGroup.UUID> groups = user.getEffectiveGroups();
=======
// TODO (anatol.pomazau): Try to merge this method with similar RefRightsForPattern#canPerform
private boolean canPerformOnAnyRef(ApprovalCategory.Id actionId,
short requireValue) {
final Set<AccountGroup.Id> groups = getEffectiveUserGroups();
>>>>>>>
/**
* @return the effective groups of the current user for this project
*/
private Set<AccountGroup.UUID> getEffectiveUserGroups() {
final Set<AccountGroup.UUID> userGroups = user.getEffectiveGroups();
if (isOwner()) {
final Set<AccountGroup.UUID> userGroupsOnProject =
new HashSet<AccountGroup.UUID>(userGroups.size() + 1);
userGroupsOnProject.addAll(userGroups);
userGroupsOnProject.add(AccountGroup.PROJECT_OWNERS);
return Collections.unmodifiableSet(userGroupsOnProject);
} else {
return userGroups;
}
}
private boolean canPerformOnAnyRef(String permissionName) {
final Set<AccountGroup.UUID> groups = getEffectiveUserGroups();
<<<<<<<
private boolean canPerformOnAllRefs(String permission) {
=======
/**
* @return the effective groups of the current user for this project
*/
private Set<AccountGroup.Id> getEffectiveUserGroups() {
final Set<AccountGroup.Id> userGroups = user.getEffectiveGroups();
if (isOwner()) {
final Set<AccountGroup.Id> userGroupsOnProject =
new HashSet<AccountGroup.Id>(userGroups.size() + 1);
userGroupsOnProject.addAll(userGroups);
userGroupsOnProject.add(systemConfig.ownerGroupId);
return Collections.unmodifiableSet(userGroupsOnProject);
} else {
return userGroups;
}
}
private boolean canPerformOnAllRefs(ApprovalCategory.Id actionId,
short requireValue) {
>>>>>>>
private boolean canPerformOnAllRefs(String permission) { |
<<<<<<<
pushFactory.create(db, ident, testRepo, PushOneCommit.SUBJECT, FILE_NAME,
new String(CONTENT_OLD));
return push.to("refs/for/master").getChangeId();
=======
pushFactory.create(db, ident, PushOneCommit.SUBJECT, FILE_NAME,
new String(CONTENT_OLD, StandardCharsets.UTF_8));
return push.to(git, "refs/for/master").getChangeId();
>>>>>>>
pushFactory.create(db, ident, testRepo, PushOneCommit.SUBJECT, FILE_NAME,
new String(CONTENT_OLD, StandardCharsets.UTF_8));
return push.to("refs/for/master").getChangeId();
<<<<<<<
pushFactory.create(db, ident, testRepo, PushOneCommit.SUBJECT, FILE_NAME2,
new String(CONTENT_NEW2), changeId);
return push.to("refs/for/master").getChangeId();
=======
pushFactory.create(db, ident, PushOneCommit.SUBJECT, FILE_NAME2,
new String(CONTENT_NEW2, StandardCharsets.UTF_8), changeId);
return push.to(git, "refs/for/master").getChangeId();
>>>>>>>
pushFactory.create(db, ident, testRepo, PushOneCommit.SUBJECT, FILE_NAME2,
new String(CONTENT_NEW2, StandardCharsets.UTF_8), changeId);
return push.to("refs/for/master").getChangeId();
<<<<<<<
pushFactory.create(db, ident, testRepo, PushOneCommit.SUBJECT, FILE_NAME,
new String(CONTENT_OLD));
return push.rm("refs/for/master").getChangeId();
=======
pushFactory.create(db, ident, PushOneCommit.SUBJECT, FILE_NAME,
new String(CONTENT_OLD, StandardCharsets.UTF_8));
return push.rm(git, "refs/for/master").getChangeId();
>>>>>>>
pushFactory.create(db, ident, testRepo, PushOneCommit.SUBJECT, FILE_NAME,
new String(CONTENT_OLD, StandardCharsets.UTF_8));
return push.rm("refs/for/master").getChangeId(); |
<<<<<<<
import com.sun.management.OperatingSystemMXBean;
import com.sun.management.UnixOperatingSystemMXBean;
=======
>>>>>>>
<<<<<<<
if (sys instanceof UnixOperatingSystemMXBean) {
final UnixOperatingSystemMXBean unix = (UnixOperatingSystemMXBean) sys;
if (unix.getOpenFileDescriptorCount() != -1) {
metrics.newCallbackMetric(
"proc/num_open_fds",
Long.class,
new Description("Number of open file descriptors").setGauge().setUnit("fds"),
new Supplier<Long>() {
@Override
public Long get() {
return unix.getOpenFileDescriptorCount();
}
});
}
=======
if (provider.getOpenFileDescriptorCount() != -1) {
metrics.newCallbackMetric(
"proc/num_open_fds",
Long.class,
new Description("Number of open file descriptors")
.setGauge()
.setUnit("fds"),
new Supplier<Long>() {
@Override
public Long get() {
return provider.getOpenFileDescriptorCount();
}
});
>>>>>>>
if (provider.getOpenFileDescriptorCount() != -1) {
metrics.newCallbackMetric(
"proc/num_open_fds",
Long.class,
new Description("Number of open file descriptors").setGauge().setUnit("fds"),
new Supplier<Long>() {
@Override
public Long get() {
return provider.getOpenFileDescriptorCount();
}
}); |
<<<<<<<
command("git-upload-pack").to(Commands.key(git, "upload-pack"));
command(git, "upload-pack").to(Upload.class);
command("git-upload-archive").to(Commands.key(git, "upload-archive"));
command(git, "upload-archive").to(UploadArchive.class);
=======
if (sshEnabled()) {
command("git-upload-pack").to(Commands.key(git, "upload-pack"));
command(git, "upload-pack").to(Upload.class);
}
>>>>>>>
if (sshEnabled()) {
command("git-upload-pack").to(Commands.key(git, "upload-pack"));
command(git, "upload-pack").to(Upload.class);
command("git-upload-archive").to(Commands.key(git, "upload-archive"));
command(git, "upload-archive").to(UploadArchive.class);
}
<<<<<<<
if (slaveMode) {
command("git-receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command("gerrit-receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command(git, "receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command(gerrit, "test-submit").to(NotSupportedInSlaveModeFailureCommand.class);
} else {
command("git-receive-pack").to(Commands.key(git, "receive-pack"));
command("gerrit-receive-pack").to(Commands.key(git, "receive-pack"));
command(git, "receive-pack").to(Commands.key(gerrit, "receive-pack"));
=======
if (!slaveMode) {
if (sshEnabled()) {
command("git-receive-pack").to(Commands.key(git, "receive-pack"));
command("gerrit-receive-pack").to(Commands.key(git, "receive-pack"));
command(git, "receive-pack").to(Commands.key(gerrit, "receive-pack"));
}
>>>>>>>
if (slaveMode) {
command("git-receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command("gerrit-receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command(git, "receive-pack").to(NotSupportedInSlaveModeFailureCommand.class);
command(gerrit, "test-submit").to(NotSupportedInSlaveModeFailureCommand.class);
} else {
if (sshEnabled()) {
command("git-receive-pack").to(Commands.key(git, "receive-pack"));
command("gerrit-receive-pack").to(Commands.key(git, "receive-pack"));
command(git, "receive-pack").to(Commands.key(gerrit, "receive-pack"));
} |
<<<<<<<
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.gerrit.common.FileUtil;
=======
import com.google.common.base.Preconditions;
>>>>>>>
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.base.Preconditions;
import com.google.gerrit.common.FileUtil; |
<<<<<<<
"keys.keyData.publicKeyPath",
"keys.keyData.azureKeyVaultId",
"keys.azureKeyVaultConfig.url",
=======
// "keys.keyData.publicKeyPath",
>>>>>>>
// "keys.keyData.publicKeyPath",
"keys.keyData.azureKeyVaultId",
"keys.azureKeyVaultConfig.url", |
<<<<<<<
=======
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.gerrit.common.TimeUtil;
>>>>>>>
<<<<<<<
=======
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.revwalk.RevCommit;
>>>>>>>
import org.eclipse.jgit.revwalk.RevCommit;
<<<<<<<
public List<SubmitStrategyOp> buildOps(
Collection<CodeReviewCommit> toMerge) throws IntegrationException {
List<CodeReviewCommit> sorted = sort(toMerge);
List<SubmitStrategyOp> ops = new ArrayList<>(sorted.size());
boolean first = true;
=======
protected MergeTip _run(final CodeReviewCommit branchTip,
final Collection<CodeReviewCommit> toMerge) throws IntegrationException {
MergeTip mergeTip = new MergeTip(branchTip, toMerge);
List<CodeReviewCommit> sorted = sort(toMerge, branchTip);
>>>>>>>
public List<SubmitStrategyOp> buildOps(
Collection<CodeReviewCommit> toMerge) throws IntegrationException {
List<CodeReviewCommit> sorted = sort(toMerge, args.mergeTip.getCurrentTip());
List<SubmitStrategyOp> ops = new ArrayList<>(sorted.size());
boolean first = true;
<<<<<<<
private class RebaseRootOp extends SubmitStrategyOp {
private RebaseRootOp(CodeReviewCommit toMerge) {
super(RebaseIfNecessary.this.args, toMerge);
}
@Override
public void updateRepoImpl(RepoContext ctx) {
// Refuse to merge a root commit into an existing branch, we cannot obtain
// a delta for the cherry-pick to apply.
toMerge.setStatusCode(CommitMergeStatus.CANNOT_REBASE_ROOT);
}
}
private class RebaseOneOp extends SubmitStrategyOp {
private RebaseChangeOp rebaseOp;
private CodeReviewCommit newCommit;
private RebaseOneOp(CodeReviewCommit toMerge) {
super(RebaseIfNecessary.this.args, toMerge);
}
@Override
public void updateRepoImpl(RepoContext ctx)
throws IntegrationException, InvalidChangeOperationException,
RestApiException, IOException, OrmException {
// TODO(dborowitz): args.rw is needed because it's a CodeReviewRevWalk.
// When hoisting BatchUpdate into MergeOp, we will need to teach
// BatchUpdate how to produce CodeReviewRevWalks.
if (args.mergeUtil.canFastForward(args.mergeSorter,
args.mergeTip.getCurrentTip(), args.rw, toMerge)) {
args.mergeTip.moveTipTo(amendGitlink(toMerge), toMerge);
acceptMergeTip(args.mergeTip);
return;
}
// Stale read of patch set is ok; see comments in RebaseChangeOp.
PatchSet origPs = args.psUtil.get(
ctx.getDb(), toMerge.getControl().getNotes(), toMerge.getPatchsetId());
rebaseOp = args.rebaseFactory.create(
toMerge.getControl(), origPs, args.mergeTip.getCurrentTip().name())
.setFireRevisionCreated(false)
          // Bypass approval copier since SubmitStrategyOp copies all approvals
// later anyway.
.setCopyApprovals(false)
.setValidatePolicy(CommitValidators.Policy.NONE);
try {
rebaseOp.updateRepo(ctx);
} catch (MergeConflictException | NoSuchChangeException e) {
toMerge.setStatusCode(CommitMergeStatus.REBASE_MERGE_CONFLICT);
throw new IntegrationException(
"Cannot rebase " + toMerge.name() + ": " + e.getMessage(), e);
}
newCommit = args.rw.parseCommit(rebaseOp.getRebasedCommit());
newCommit = amendGitlink(newCommit);
newCommit.copyFrom(toMerge);
newCommit.setStatusCode(CommitMergeStatus.CLEAN_REBASE);
newCommit.setPatchsetId(rebaseOp.getPatchSetId());
args.mergeTip.moveTipTo(newCommit, newCommit);
args.commits.put(args.mergeTip.getCurrentTip());
acceptMergeTip(args.mergeTip);
}
@Override
public PatchSet updateChangeImpl(ChangeContext ctx)
throws NoSuchChangeException, ResourceConflictException,
OrmException, IOException {
if (rebaseOp == null) {
// Took the fast-forward option, nothing to do.
return null;
=======
} else if (n.getParentCount() == 1) {
if (args.mergeUtil.canFastForward(args.mergeSorter,
mergeTip.getCurrentTip(), args.rw, n)) {
n.setStatusCode(CommitMergeStatus.CLEAN_MERGE);
mergeTip.moveTipTo(n, n);
} else {
try {
PatchSet newPatchSet = rebase(n, mergeTip);
List<PatchSetApproval> approvals = Lists.newArrayList();
for (PatchSetApproval a : args.approvalsUtil.byPatchSet(args.db,
n.getControl(), n.getPatchsetId())) {
approvals.add(new PatchSetApproval(newPatchSet.getId(), a));
}
// rebaseChange.rebase() may already have copied some approvals,
// use upsert, not insert, to avoid constraint violation on database
args.db.patchSetApprovals().upsert(approvals);
CodeReviewCommit newTip = args.rw.parseCommit(
ObjectId.fromString(newPatchSet.getRevision().get()));
mergeTip.moveTipTo(newTip, newTip);
n.change().setCurrentPatchSet(
patchSetInfoFactory.get(args.rw, mergeTip.getCurrentTip(),
newPatchSet.getId()));
mergeTip.getCurrentTip().copyFrom(n);
mergeTip.getCurrentTip().setControl(
args.changeControlFactory.controlFor(n.change(), args.caller));
mergeTip.getCurrentTip().setPatchsetId(newPatchSet.getId());
mergeTip.getCurrentTip().setStatusCode(
CommitMergeStatus.CLEAN_REBASE);
newCommits.put(newPatchSet.getId().getParentKey(),
mergeTip.getCurrentTip());
setRefLogIdent();
} catch (MergeConflictException e) {
n.setStatusCode(CommitMergeStatus.REBASE_MERGE_CONFLICT);
throw new IntegrationException(
"Cannot rebase " + n.name() + ": " + e.getMessage(), e);
} catch (NoSuchChangeException | OrmException | IOException
| RestApiException | UpdateException e) {
throw new IntegrationException("Cannot rebase " + n.name(), e);
}
}
} else if (n.getParentCount() > 1) {
// There are multiple parents, so this is a merge commit. We
// don't want to rebase the merge as clients can't easily
// rebase their history with that merge present and replaced
// by an equivalent merge with a different first parent. So
// instead behave as though MERGE_IF_NECESSARY was configured.
//
try {
if (args.rw.isMergedInto(mergeTip.getCurrentTip(), n)) {
mergeTip.moveTipTo(n, n);
} else {
PersonIdent myIdent = getSubmitterIdent();
mergeTip.moveTipTo(
args.mergeUtil.mergeOneCommit(myIdent, myIdent,
args.repo, args.rw, args.inserter, args.canMergeFlag,
args.destBranch, mergeTip.getCurrentTip(), n), n);
}
RevCommit initialTip = mergeTip.getInitialTip();
args.mergeUtil.markCleanMerges(args.rw, args.canMergeFlag,
mergeTip.getCurrentTip(), initialTip == null ?
ImmutableSet.<RevCommit>of() : ImmutableSet.of(initialTip));
setRefLogIdent();
} catch (IOException e) {
throw new IntegrationException("Cannot merge " + n.name(), e);
}
>>>>>>>
private class RebaseRootOp extends SubmitStrategyOp {
private RebaseRootOp(CodeReviewCommit toMerge) {
super(RebaseIfNecessary.this.args, toMerge);
}
@Override
public void updateRepoImpl(RepoContext ctx) {
// Refuse to merge a root commit into an existing branch, we cannot obtain
// a delta for the cherry-pick to apply.
toMerge.setStatusCode(CommitMergeStatus.CANNOT_REBASE_ROOT);
}
}
private class RebaseOneOp extends SubmitStrategyOp {
private RebaseChangeOp rebaseOp;
private CodeReviewCommit newCommit;
private RebaseOneOp(CodeReviewCommit toMerge) {
super(RebaseIfNecessary.this.args, toMerge);
}
@Override
public void updateRepoImpl(RepoContext ctx)
throws IntegrationException, InvalidChangeOperationException,
RestApiException, IOException, OrmException {
// TODO(dborowitz): args.rw is needed because it's a CodeReviewRevWalk.
// When hoisting BatchUpdate into MergeOp, we will need to teach
// BatchUpdate how to produce CodeReviewRevWalks.
if (args.mergeUtil.canFastForward(args.mergeSorter,
args.mergeTip.getCurrentTip(), args.rw, toMerge)) {
args.mergeTip.moveTipTo(amendGitlink(toMerge), toMerge);
acceptMergeTip(args.mergeTip);
return;
}
// Stale read of patch set is ok; see comments in RebaseChangeOp.
PatchSet origPs = args.psUtil.get(
ctx.getDb(), toMerge.getControl().getNotes(), toMerge.getPatchsetId());
rebaseOp = args.rebaseFactory.create(
toMerge.getControl(), origPs, args.mergeTip.getCurrentTip().name())
.setFireRevisionCreated(false)
          // Bypass approval copier since SubmitStrategyOp copies all approvals
// later anyway.
.setCopyApprovals(false)
.setValidatePolicy(CommitValidators.Policy.NONE);
try {
rebaseOp.updateRepo(ctx);
} catch (MergeConflictException | NoSuchChangeException e) {
toMerge.setStatusCode(CommitMergeStatus.REBASE_MERGE_CONFLICT);
throw new IntegrationException(
"Cannot rebase " + toMerge.name() + ": " + e.getMessage(), e);
}
newCommit = args.rw.parseCommit(rebaseOp.getRebasedCommit());
newCommit = amendGitlink(newCommit);
newCommit.copyFrom(toMerge);
newCommit.setStatusCode(CommitMergeStatus.CLEAN_REBASE);
newCommit.setPatchsetId(rebaseOp.getPatchSetId());
args.mergeTip.moveTipTo(newCommit, newCommit);
args.commits.put(args.mergeTip.getCurrentTip());
acceptMergeTip(args.mergeTip);
}
@Override
public PatchSet updateChangeImpl(ChangeContext ctx)
throws NoSuchChangeException, ResourceConflictException,
OrmException, IOException {
if (rebaseOp == null) {
// Took the fast-forward option, nothing to do.
return null; |
<<<<<<<
private final PatchSetInfoFactory psInfoFactory;
=======
private final PersonIdent myIdent;
>>>>>>>
private final PatchSetInfoFactory psInfoFactory;
private final PersonIdent myIdent;
<<<<<<<
final PatchSetInfoFactory psif,
PatchListCache patchListCache, SchemaFactory<ReviewDb> schema) {
=======
PatchListCache patchListCache, SchemaFactory<ReviewDb> schema,
@GerritPersonIdent PersonIdent myIdent) {
>>>>>>>
final PatchSetInfoFactory psif,
PatchListCache patchListCache, SchemaFactory<ReviewDb> schema,
@GerritPersonIdent PersonIdent myIdent) {
<<<<<<<
this.psInfoFactory = psif;
=======
this.myIdent = myIdent;
>>>>>>>
this.psInfoFactory = psif;
this.myIdent = myIdent; |
<<<<<<<
private final Provider<InternalAccountQuery> accountQueryProvider;
private final ExternalIds externalIds;
=======
private final AuditService auditService;
>>>>>>>
private final ExternalIds externalIds;
<<<<<<<
Provider<InternalAccountQuery> accountQueryProvider,
ExternalIds externalIds,
ExternalIdsUpdate.Server externalIdsUpdateFactory,
GroupsUpdate.Factory groupsUpdateFactory,
SetInactiveFlag setInactiveFlag) {
=======
AuditService auditService,
ExternalIdsUpdate.Server externalIdsUpdateFactory) {
>>>>>>>
ExternalIds externalIds,
ExternalIdsUpdate.Server externalIdsUpdateFactory,
GroupsUpdate.Factory groupsUpdateFactory,
SetInactiveFlag setInactiveFlag) {
<<<<<<<
this.awaitsFirstAccountCheck =
new AtomicBoolean(cfg.getBoolean("capability", "makeFirstUserAdmin", true));
this.accountQueryProvider = accountQueryProvider;
this.externalIds = externalIds;
=======
this.awaitsFirstAccountCheck = new AtomicBoolean(true);
this.auditService = auditService;
>>>>>>>
this.awaitsFirstAccountCheck =
new AtomicBoolean(cfg.getBoolean("capability", "makeFirstUserAdmin", true));
this.externalIds = externalIds; |
<<<<<<<
return cmd + " " + path + userName;
=======
return cmd + " " + path;
} else {
return req.getMethod() + " " + uri;
>>>>>>>
return cmd + " " + path; |
<<<<<<<
import think.rpgitems.support.WGSupport;
=======
import think.rpgitems.utils.Pair;
>>>>>>>
import think.rpgitems.support.WGSupport;
import think.rpgitems.utils.Pair; |
<<<<<<<
import static org.assertj.core.api.Assertions.catchThrowable;
=======
import org.junit.After;
import org.junit.Before;
>>>>>>>
import org.junit.After;
import org.junit.Before;
import static org.assertj.core.api.Assertions.catchThrowable;
<<<<<<<
CliResult result = LegacyCliAdapter.writeToOutputFile(config, outputPath);
=======
CliResult result = LegacyCliAdapter.writeToOutputFile(config, outputPath);
>>>>>>>
CliResult result = LegacyCliAdapter.writeToOutputFile(config, outputPath); |
<<<<<<<
private TitleProvider mTitleProvider;
private int mCurrentPage = -1;
=======
private PagerAdapter mPagerAdapter;
private int mCurrentPage;
>>>>>>>
private PagerAdapter mPagerAdapter;
private int mCurrentPage = -1; |
<<<<<<<
String LAYOUT_DRAWER = "/layout/drawer";
String LAYOUT_VIEW_SYSTEM = "/layout/view_system";
String LAYOUT_VIEW_ANIMATE = "/layout/view_animate";
=======
String LAYOUT_SUPPORT_28 = "/layout/support_28";
String LAYOUT_BOTTOM_APP_BAR = "/layout/bottom_app_bar";
>>>>>>>
String LAYOUT_DRAWER = "/layout/drawer";
String LAYOUT_VIEW_SYSTEM = "/layout/view_system";
String LAYOUT_VIEW_ANIMATE = "/layout/view_animate";
String LAYOUT_SUPPORT_28 = "/layout/support_28";
String LAYOUT_BOTTOM_APP_BAR = "/layout/bottom_app_bar"; |
<<<<<<<
import com.github.tessera.config.KeyDataConfig;
=======
import com.github.tessera.config.PrivateKeyData;
>>>>>>>
import com.github.tessera.config.KeyDataConfig;
<<<<<<<
import java.util.stream.IntStream;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonReader;
=======
import java.util.stream.IntStream;
import java.util.stream.Stream;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;
>>>>>>>
import java.util.stream.IntStream;
import javax.json.JsonObjectBuilder;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;
<<<<<<<
.sslClientKeyStorePath(tlsserverkey)
.sslClientKeyStorePassword("")
.sslClientTrustStorePath(tlsservercert)
.knownClientsFile(tlsknownclients)
.knownServersFile(tlsknownservers)
=======
.sslClientKeyStorePath(tlsserverkey)
.sslClientKeyStorePassword("FIXME")
.sslClientTrustStorePath(tlsservercert)
.knownClientsFile("FIXME")
.knownServersFile("FIXME")
>>>>>>>
.sslClientKeyStorePath(tlsserverkey)
.sslClientKeyStorePassword("")
.sslClientTrustStorePath(tlsservercert)
.knownClientsFile(tlsknownclients)
.knownServersFile(tlsknownservers)
<<<<<<<
static List<KeyDataConfig> createPrivateKeyData(List<String> privateKeys, List<String> privateKeyPasswords) {
//Populate null values assume that they arent private
List<String> passwordList = new ArrayList<>(privateKeyPasswords);
for (int i = privateKeyPasswords.size() - 1; i < privateKeys.size(); i++) {
passwordList.add(null);
}
List<JsonObject> priavteKeyJson = privateKeys.stream()
.map(s -> Paths.get(s))
.map(path -> IOCallback.execute(() -> Files.newInputStream(path)))
.map(is -> Json.createReader(is))
.map(JsonReader::readObject)
.collect(Collectors.toList());
List<KeyDataConfig> privateKeyData = IntStream.range(0, priavteKeyJson.size())
//FIXME: Canyt set to null value.. need to use addNull("password")
.mapToObj(i -> {
final String password = passwordList.get(i);
final JsonObject keyDatC = Json.createObjectBuilder(priavteKeyJson.get(i)).build();
boolean isLocked = Objects.equals(keyDatC.getString("type"), "argon2sbox");
final JsonObject dataNode = keyDatC.getJsonObject("data");
final JsonObjectBuilder ammendedDataNode = Json.createObjectBuilder(dataNode);
if (isLocked) {
ammendedDataNode.add("password", Objects.requireNonNull(password, "Password is required."));
}
return Json.createObjectBuilder(keyDatC)
.remove("data")
.add("data", ammendedDataNode)
.build();
})
.map(JsonObject::toString)
.map(String::getBytes)
.map(ByteArrayInputStream::new)
.map(inputStream -> JaxbUtil.unmarshal(inputStream, KeyDataConfig.class))
.collect(Collectors.toList());
return Collections.unmodifiableList(privateKeyData);
}
=======
static List<PrivateKeyData> createPrivateKeyData(List<String> privateKeys, List<String> privateKeyPasswords) {
//Populate null values assume that they arent private
List<String> passwordList = new ArrayList<>(privateKeyPasswords);
for(int i = privateKeyPasswords.size() - 1; i < privateKeys.size();i++) {
passwordList.add(null);
}
List<JsonObject> priavteKeyJson = privateKeys.stream()
.map(s -> Paths.get(s))
.map(path -> IOCallback.execute(() -> Files.newInputStream(path)))
.map(is -> Json.createReader(is))
.map(JsonReader::readObject)
.collect(Collectors.toList());
List<PrivateKeyData> privateKeyData = IntStream.range(0, priavteKeyJson.size())
.mapToObj(i -> Json.createObjectBuilder(priavteKeyJson.get(i)).add("password",privateKeyPasswords.get(i)).build())
.map(JsonObject::toString)
.map(String::getBytes)
.map(ByteArrayInputStream::new)
.map(inputStream -> JaxbUtil.unmarshal(inputStream, PrivateKeyData.class))
.collect(Collectors.toList());
return Collections.unmodifiableList(privateKeyData);
}
>>>>>>>
static List<KeyDataConfig> createPrivateKeyData(List<String> privateKeys, List<String> privateKeyPasswords) {
//Populate null values assume that they arent private
List<String> passwordList = new ArrayList<>(privateKeyPasswords);
for (int i = privateKeyPasswords.size() - 1; i < privateKeys.size(); i++) {
passwordList.add(null);
}
List<JsonObject> priavteKeyJson = privateKeys.stream()
.map(s -> Paths.get(s))
.map(path -> IOCallback.execute(() -> Files.newInputStream(path)))
.map(is -> Json.createReader(is))
.map(JsonReader::readObject)
.collect(Collectors.toList());
List<KeyDataConfig> privateKeyData = IntStream.range(0, priavteKeyJson.size())
//FIXME: Canyt set to null value.. need to use addNull("password")
.mapToObj(i -> {
final String password = passwordList.get(i);
final JsonObject keyDatC = Json.createObjectBuilder(priavteKeyJson.get(i)).build();
boolean isLocked = Objects.equals(keyDatC.getString("type"), "argon2sbox");
final JsonObject dataNode = keyDatC.getJsonObject("data");
final JsonObjectBuilder ammendedDataNode = Json.createObjectBuilder(dataNode);
if (isLocked) {
ammendedDataNode.add("password", Objects.requireNonNull(password, "Password is required."));
}
return Json.createObjectBuilder(keyDatC)
.remove("data")
.add("data", ammendedDataNode)
.build();
})
.map(JsonObject::toString)
.map(String::getBytes)
.map(ByteArrayInputStream::new)
.map(inputStream -> JaxbUtil.unmarshal(inputStream, KeyDataConfig.class))
.collect(Collectors.toList());
return Collections.unmodifiableList(privateKeyData);
} |
<<<<<<<
import com.quorum.tessera.encryption.Nonce;
import com.quorum.tessera.partyinfo.ResendManager;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.data.EncryptedRawTransactionDAO;
import com.quorum.tessera.partyinfo.ResendResponse;
import com.quorum.tessera.partyinfo.ResendRequestType;
import com.quorum.tessera.partyinfo.ResendRequest;
=======
>>>>>>>
import com.quorum.tessera.encryption.Nonce;
import com.quorum.tessera.partyinfo.ResendManager;
import com.quorum.tessera.data.EncryptedTransactionDAO;
import com.quorum.tessera.data.EncryptedRawTransactionDAO;
import com.quorum.tessera.partyinfo.ResendResponse;
import com.quorum.tessera.partyinfo.ResendRequestType;
import com.quorum.tessera.partyinfo.ResendRequest;
<<<<<<<
import com.quorum.tessera.partyinfo.PublishPayloadException;
import com.quorum.tessera.transaction.exception.PrivacyViolationException;
=======
>>>>>>>
import com.quorum.tessera.partyinfo.PublishPayloadException;
import com.quorum.tessera.transaction.exception.PrivacyViolationException;
<<<<<<<
private Map<TxHash, EncodedPayload> buildAffectedContractTransactions(
PrivacyMode privacyMode, String[] affectedContractTransactionsList) {
if (Objects.isNull(affectedContractTransactionsList)) {
return Collections.emptyMap();
}
final Map<TxHash, EncodedPayload> affectedContractTransactions = new HashMap<>();
for (String affTxHashB64 : affectedContractTransactionsList) {
MessageHash affTxHash = new MessageHash(base64Decoder.decode(affTxHashB64));
Optional<EncryptedTransaction> affTx = this.encryptedTransactionDAO.retrieveByHash(affTxHash);
if (affTx.isPresent()) {
affectedContractTransactions.put(
new TxHash(affTxHash.getHashBytes()), payloadEncoder.decode(affTx.get().getEncodedPayload()));
} else {
throw new PrivacyViolationException("Unable to find affectedContractTransaction " + affTxHashB64);
}
}
return affectedContractTransactions;
}
private Map<TxHash, EncodedPayload> buildAffectedContractTransactions(
PrivacyMode privacyMode, Set<TxHash> txHashes) {
if (Objects.isNull(txHashes) || txHashes.isEmpty()) {
return Collections.emptyMap();
}
final Map<TxHash, EncodedPayload> affectedContractTransactions = new HashMap<>();
for (TxHash txHash : txHashes) {
MessageHash affTxHash = new MessageHash(txHash.getBytes());
Optional<EncryptedTransaction> affTx = this.encryptedTransactionDAO.retrieveByHash(affTxHash);
if (affTx.isPresent()) {
affectedContractTransactions.put(
new TxHash(affTxHash.getHashBytes()), payloadEncoder.decode(affTx.get().getEncodedPayload()));
} else {
LOGGER.debug("Unable to find affectedContractTransaction {}", txHash.encodeToBase64());
}
}
return affectedContractTransactions;
}
private boolean validatePrivacyMode(
Optional<MessageHash> txHash,
PrivacyMode privacyMode,
Map<TxHash, EncodedPayload> affectedContractTransactions) {
boolean result = true;
for (Map.Entry<TxHash, EncodedPayload> entry : affectedContractTransactions.entrySet()) {
final PrivacyMode affectedContractPrivacyMode = entry.getValue().getPrivacyMode();
if (affectedContractPrivacyMode != privacyMode) {
if (!txHash.isPresent()) {
throw new PrivacyViolationException(
"Private state validation flag mismatched with Affected Txn "
+ entry.getKey().encodeToBase64());
} else {
LOGGER.info(
"ACOTH {} has PrivacyMode={} for TX {} with PrivacyMode={}. Ignoring transaction.",
entry.getKey().encodeToBase64(),
affectedContractPrivacyMode.name(),
base64Decoder.encodeToString(txHash.get().getHashBytes()),
privacyMode.name());
result = false;
}
}
}
return result;
}
private boolean validateRecipients(
Optional<MessageHash> txHash,
List<PublicKey> recipientList,
Map<TxHash, EncodedPayload> affectedContractTransactions) {
for (Map.Entry<TxHash, EncodedPayload> entry : affectedContractTransactions.entrySet()) {
if (!entry.getValue().getRecipientKeys().containsAll(recipientList)
|| !recipientList.containsAll(entry.getValue().getRecipientKeys())) {
throw new PrivacyViolationException(
"Recipients mismatched for Affected Txn "
+ entry.getKey().encodeToBase64()
+ ". TxHash="
+ txHash.map(MessageHash::getHashBytes)
.map(base64Decoder::encodeToString)
.orElse("NONE"));
}
}
return true;
}
private boolean validateIfSenderIsGenuine(
MessageHash txHash, EncodedPayload payload, Map<TxHash, EncodedPayload> affectedContractTransactions) {
boolean result = true;
if (affectedContractTransactions.size() != payload.getAffectedContractTransactions().size()) {
// This could be a recipient discovery attack. Respond successfully while not saving the transaction.
LOGGER.info(
"Not all ACOTHs were found for inbound TX {}. Ignoring transaction.",
base64Decoder.encodeToString(txHash.getHashBytes()));
return false;
}
final PublicKey senderKey = payload.getSenderKey();
for (Map.Entry<TxHash, EncodedPayload> entry : affectedContractTransactions.entrySet()) {
if (!entry.getValue().getRecipientKeys().contains(senderKey)) {
LOGGER.info(
"Sender key {} for TX {} is not a recipient for ACOTH {}",
senderKey.encodeToBase64(),
base64Decoder.encodeToString(txHash.getHashBytes()),
entry.getKey().encodeToBase64());
result = false;
}
}
return result;
}
=======
@Override
@Transactional
public boolean isSender(final String key) {
final byte[] hashBytes = base64Decoder.decode(key);
final MessageHash hash = new MessageHash(hashBytes);
final EncodedPayload payload = this.fetchPayload(hash);
return enclave.getPublicKeys().contains(payload.getSenderKey());
}
@Override
@Transactional
public List<PublicKey> getParticipants(final String ptmHash) {
final byte[] hashBytes = base64Decoder.decode(ptmHash);
final MessageHash hash = new MessageHash(hashBytes);
final EncodedPayload payload = this.fetchPayload(hash);
// this includes the sender
return payload.getRecipientKeys();
}
private EncodedPayload fetchPayload(final MessageHash hash) {
return encryptedTransactionDAO
.retrieveByHash(hash)
.map(EncryptedTransaction::getEncodedPayload)
.map(payloadEncoder::decode)
.orElseThrow(() -> new TransactionNotFoundException("Message with hash " + hash + " was not found"));
}
>>>>>>>
private Map<TxHash, EncodedPayload> buildAffectedContractTransactions(
PrivacyMode privacyMode, String[] affectedContractTransactionsList) {
if (Objects.isNull(affectedContractTransactionsList)) {
return Collections.emptyMap();
}
final Map<TxHash, EncodedPayload> affectedContractTransactions = new HashMap<>();
for (String affTxHashB64 : affectedContractTransactionsList) {
MessageHash affTxHash = new MessageHash(base64Decoder.decode(affTxHashB64));
Optional<EncryptedTransaction> affTx = this.encryptedTransactionDAO.retrieveByHash(affTxHash);
if (affTx.isPresent()) {
affectedContractTransactions.put(
new TxHash(affTxHash.getHashBytes()), payloadEncoder.decode(affTx.get().getEncodedPayload()));
} else {
throw new PrivacyViolationException("Unable to find affectedContractTransaction " + affTxHashB64);
}
}
return affectedContractTransactions;
}
private Map<TxHash, EncodedPayload> buildAffectedContractTransactions(
PrivacyMode privacyMode, Set<TxHash> txHashes) {
if (Objects.isNull(txHashes) || txHashes.isEmpty()) {
return Collections.emptyMap();
}
final Map<TxHash, EncodedPayload> affectedContractTransactions = new HashMap<>();
for (TxHash txHash : txHashes) {
MessageHash affTxHash = new MessageHash(txHash.getBytes());
Optional<EncryptedTransaction> affTx = this.encryptedTransactionDAO.retrieveByHash(affTxHash);
if (affTx.isPresent()) {
affectedContractTransactions.put(
new TxHash(affTxHash.getHashBytes()), payloadEncoder.decode(affTx.get().getEncodedPayload()));
} else {
LOGGER.debug("Unable to find affectedContractTransaction {}", txHash.encodeToBase64());
}
}
return affectedContractTransactions;
}
private boolean validatePrivacyMode(
Optional<MessageHash> txHash,
PrivacyMode privacyMode,
Map<TxHash, EncodedPayload> affectedContractTransactions) {
boolean result = true;
for (Map.Entry<TxHash, EncodedPayload> entry : affectedContractTransactions.entrySet()) {
final PrivacyMode affectedContractPrivacyMode = entry.getValue().getPrivacyMode();
if (affectedContractPrivacyMode != privacyMode) {
if (!txHash.isPresent()) {
throw new PrivacyViolationException(
"Private state validation flag mismatched with Affected Txn "
+ entry.getKey().encodeToBase64());
} else {
LOGGER.info(
"ACOTH {} has PrivacyMode={} for TX {} with PrivacyMode={}. Ignoring transaction.",
entry.getKey().encodeToBase64(),
affectedContractPrivacyMode.name(),
base64Decoder.encodeToString(txHash.get().getHashBytes()),
privacyMode.name());
result = false;
}
}
}
return result;
}
private boolean validateRecipients(
Optional<MessageHash> txHash,
List<PublicKey> recipientList,
Map<TxHash, EncodedPayload> affectedContractTransactions) {
for (Map.Entry<TxHash, EncodedPayload> entry : affectedContractTransactions.entrySet()) {
if (!entry.getValue().getRecipientKeys().containsAll(recipientList)
|| !recipientList.containsAll(entry.getValue().getRecipientKeys())) {
throw new PrivacyViolationException(
"Recipients mismatched for Affected Txn "
+ entry.getKey().encodeToBase64()
+ ". TxHash="
+ txHash.map(MessageHash::getHashBytes)
.map(base64Decoder::encodeToString)
.orElse("NONE"));
}
}
return true;
}
private boolean validateIfSenderIsGenuine(
MessageHash txHash, EncodedPayload payload, Map<TxHash, EncodedPayload> affectedContractTransactions) {
boolean result = true;
if (affectedContractTransactions.size() != payload.getAffectedContractTransactions().size()) {
// This could be a recipient discovery attack. Respond successfully while not saving the transaction.
LOGGER.info(
"Not all ACOTHs were found for inbound TX {}. Ignoring transaction.",
base64Decoder.encodeToString(txHash.getHashBytes()));
return false;
}
final PublicKey senderKey = payload.getSenderKey();
for (Map.Entry<TxHash, EncodedPayload> entry : affectedContractTransactions.entrySet()) {
if (!entry.getValue().getRecipientKeys().contains(senderKey)) {
LOGGER.info(
"Sender key {} for TX {} is not a recipient for ACOTH {}",
senderKey.encodeToBase64(),
base64Decoder.encodeToString(txHash.getHashBytes()),
entry.getKey().encodeToBase64());
result = false;
}
}
return result;
}
@Override
@Transactional
public boolean isSender(final String key) {
final byte[] hashBytes = base64Decoder.decode(key);
final MessageHash hash = new MessageHash(hashBytes);
final EncodedPayload payload = this.fetchPayload(hash);
return enclave.getPublicKeys().contains(payload.getSenderKey());
}
@Override
@Transactional
public List<PublicKey> getParticipants(final String ptmHash) {
final byte[] hashBytes = base64Decoder.decode(ptmHash);
final MessageHash hash = new MessageHash(hashBytes);
final EncodedPayload payload = this.fetchPayload(hash);
// this includes the sender
return payload.getRecipientKeys();
}
private EncodedPayload fetchPayload(final MessageHash hash) {
return encryptedTransactionDAO
.retrieveByHash(hash)
.map(EncryptedTransaction::getEncodedPayload)
.map(payloadEncoder::decode)
.orElseThrow(() -> new TransactionNotFoundException("Message with hash " + hash + " was not found"));
} |
<<<<<<<
import com.couchbase.touchdb.TDMisc;
import com.couchbase.touchdb.TDRevision;
import com.couchbase.touchdb.TDRevisionList;
import com.couchbase.touchdb.TDStatus;
import com.couchbase.touchdb.replicator.changetracker.TDChangeTracker;
import com.couchbase.touchdb.replicator.changetracker.TDChangeTracker.TDChangeTrackerMode;
import com.couchbase.touchdb.replicator.changetracker.TDChangeTrackerClient;
import com.couchbase.touchdb.support.TDBatchProcessor;
import com.couchbase.touchdb.support.TDBatcher;
import com.couchbase.touchdb.support.TDRemoteRequestCompletionBlock;
public class TDPuller extends TDReplicator implements TDChangeTrackerClient {
private static final int MAX_OPEN_HTTP_CONNECTIONS = 8;
=======
import com.couchbase.touchdb.support.HttpClientFactory;
>>>>>>>
import com.couchbase.touchdb.TDMisc;
import com.couchbase.touchdb.TDRevision;
import com.couchbase.touchdb.TDRevisionList;
import com.couchbase.touchdb.TDStatus;
import com.couchbase.touchdb.replicator.changetracker.TDChangeTracker;
import com.couchbase.touchdb.replicator.changetracker.TDChangeTracker.TDChangeTrackerMode;
import com.couchbase.touchdb.replicator.changetracker.TDChangeTrackerClient;
import com.couchbase.touchdb.support.HttpClientFactory;
import com.couchbase.touchdb.support.TDBatchProcessor;
import com.couchbase.touchdb.support.TDBatcher;
import com.couchbase.touchdb.support.TDRemoteRequestCompletionBlock;
public class TDPuller extends TDReplicator implements TDChangeTrackerClient {
private static final int MAX_OPEN_HTTP_CONNECTIONS = 8;
<<<<<<<
super(db, remote, continuous);
}
public void setFilterName(String filterName) {
this.filterName = filterName;
}
public void setFilterParams(Map<String, Object> filterParams) {
this.filterParams = filterParams;
=======
this(db, remote, continuous, null);
// TODO Auto-generated constructor stub
}
public TDPuller(TDDatabase db, URL remote, boolean continuous, HttpClientFactory clientFactory) {
super(db, remote, continuous, clientFactory);
// TODO Auto-generated constructor stub
>>>>>>>
this(db, remote, continuous, null);
}
public TDPuller(TDDatabase db, URL remote, boolean continuous, HttpClientFactory clientFactory) {
super(db, remote, continuous, clientFactory);
}
public void setFilterName(String filterName) {
this.filterName = filterName;
}
public void setFilterParams(Map<String, Object> filterParams) {
this.filterParams = filterParams; |
<<<<<<<
@Override
public PublicKey[] getNodeKeys() {
return nodeKeys;
}
@Override
public PublicKey[] getAlwaysSendTo() {
return alwaysSendTo;
}
=======
@Override
public PublicKey[] alwaysSendTo() {
File[] alwaysSendTo = config.alwaysSendTo();
return Arrays.stream(alwaysSendTo).map(file -> readPublicKey(file)).toArray(PublicKey[]::new);
}
>>>>>>>
public PublicKey[] alwaysSendTo() {
File[] alwaysSendTo = config.alwaysSendTo();
return Arrays.stream(alwaysSendTo).map(file -> readPublicKey(file)).toArray(PublicKey[]::new);
} |