conflict_resolution
<<<<<<<
import org.jboss.netty.channel.ChannelUpstreamHandler;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.handler.codec.http.HttpHeaders;
=======
>>>>>>>
import org.jboss.netty.channel.Channels;
import org.jboss.netty.handler.codec.http.HttpHeaders;
<<<<<<<
import com.eucalyptus.component.annotation.AwsServiceName;
import com.eucalyptus.component.annotation.PublicService;
import com.eucalyptus.context.Contexts;
=======
>>>>>>>
import com.eucalyptus.component.annotation.PublicService;
<<<<<<<
import com.eucalyptus.ws.protocol.SoapHandler;
import com.google.common.base.Supplier;
=======
>>>>>>>
import com.eucalyptus.ws.protocol.SoapHandler;
import com.google.common.base.Supplier;
<<<<<<<
import edu.ucsb.eucalyptus.cloud.entities.SystemConfiguration;
import edu.ucsb.eucalyptus.msgs.BaseMessage;
=======
>>>>>>>
import edu.ucsb.eucalyptus.cloud.entities.SystemConfiguration; |
<<<<<<<
EucaButton submit_button;
=======
Button submit_button;
>>>>>>>
EucaButton submit_button;
<<<<<<<
EucaButton cancel_button = new EucaButton( "Cancel", DefaultPageEucaButtonListener );
=======
Button cancel_button = new Button( "Cancel", DefaultPageButtonListener );
>>>>>>>
EucaButton cancel_button = new EucaButton( "Cancel", DefaultPageEucaButtonListener );
<<<<<<<
EucaButton submit_button = new EucaButton ( "Recover Password", RecoverEucaButtonListener );
EucaButton cancel_button = new EucaButton ( "Cancel", DefaultPageEucaButtonListener );
=======
Button submit_button = new Button ( "Change Password", RecoverButtonListener );
Button cancel_button = new Button ( "Cancel", DefaultPageButtonListener );
EucalyptusKeyboardListener sl = new EucalyptusKeyboardListener(submit_button, cancel_button);
cleartextPassword1_box.addKeyUpHandler(sl);
cleartextPassword2_box.addKeyUpHandler(sl);
>>>>>>>
EucaButton submit_button = new EucaButton ( "Change Password", RecoverEucaButtonListener );
EucaButton cancel_button = new EucaButton ( "Cancel", DefaultPageEucaButtonListener );
EucalyptusKeyboardListener sl = new EucalyptusKeyboardListener(submit_button, cancel_button);
cleartextPassword1_box.addKeyUpHandler(sl);
cleartextPassword2_box.addKeyUpHandler(sl);
<<<<<<<
if (loggedInUser.getEmail().equalsIgnoreCase( "n/a" ) ) {
=======
if (loggedInUser.getEmail().equalsIgnoreCase( UserInfoWeb.BOGUS_ENTRY ) ) {
>>>>>>>
if (loggedInUser.getEmail().equalsIgnoreCase( UserInfoWeb.BOGUS_ENTRY ) ) { |
<<<<<<<
new ImageManifestFile(
this.getKernel( ).getManifestLocation( ),
BundleImageManifest.INSTANCE,
ImageConfiguration.getInstance( ).getMaxManifestSizeBytes( ) ),
partition.getNodeCertificate().getPublicKey( ),
this.getKernel( ).getDisplayName( ) + "-" + reservationId );
vmTypeInfo.setKernel( this.getKernel( ).getDisplayName( ), manifestLocation );
=======
new ImageManifestFile( this.getKernel( ).getManifestLocation( ), BundleImageManifest.INSTANCE ),
partition.getNodeCertificate().getPublicKey(), this.getKernel( ).getDisplayName( ) + "-" + reservationId, true);
vmTypeInfo.setKernel( this.getKernel( ).getDisplayName( ), manifestLocation, this.getKernel( ).getImageSizeBytes() );
>>>>>>>
new ImageManifestFile(
this.getKernel( ).getManifestLocation( ),
BundleImageManifest.INSTANCE,
ImageConfiguration.getInstance( ).getMaxManifestSizeBytes( ) ),
partition.getNodeCertificate().getPublicKey(),
this.getKernel( ).getDisplayName( ) + "-" + reservationId, true);
vmTypeInfo.setKernel( this.getKernel( ).getDisplayName( ), manifestLocation, this.getKernel( ).getImageSizeBytes() );
<<<<<<<
new ImageManifestFile(
this.getRamdisk( ).getManifestLocation( ),
BundleImageManifest.INSTANCE,
ImageConfiguration.getInstance( ).getMaxManifestSizeBytes( ) ),
partition.getNodeCertificate().getPublicKey( ),
this.getRamdisk( ).getDisplayName( ) + "-" + reservationId );
vmTypeInfo.setRamdisk( this.getRamdisk( ).getDisplayName( ), manifestLocation );
=======
new ImageManifestFile( this.getRamdisk( ).getManifestLocation( ), BundleImageManifest.INSTANCE ),
partition.getNodeCertificate().getPublicKey(), this.getRamdisk( ).getDisplayName( ) + "-" + reservationId, true);
vmTypeInfo.setRamdisk( this.getRamdisk( ).getDisplayName( ), manifestLocation, this.getRamdisk( ).getImageSizeBytes() );
>>>>>>>
new ImageManifestFile(
this.getRamdisk( ).getManifestLocation( ),
BundleImageManifest.INSTANCE,
ImageConfiguration.getInstance( ).getMaxManifestSizeBytes( )),
partition.getNodeCertificate().getPublicKey(),
this.getRamdisk( ).getDisplayName( ) + "-" + reservationId, true);
vmTypeInfo.setRamdisk( this.getRamdisk( ).getDisplayName( ), manifestLocation, this.getRamdisk( ).getImageSizeBytes() );
<<<<<<<
new ImageManifestFile(
((StaticDiskImage) this.getMachine( )).getRunManifestLocation( ),
BundleImageManifest.INSTANCE,
ImageConfiguration.getInstance( ).getMaxManifestSizeBytes( ) ),
partition.getNodeCertificate( ).getPublicKey( ),
reservationId );
=======
new ImageManifestFile( ((StaticDiskImage) this.getMachine()).getRunManifestLocation(), BundleImageManifest.INSTANCE ),
partition.getNodeCertificate().getPublicKey(), reservationId, true);
>>>>>>>
new ImageManifestFile(
((StaticDiskImage) this.getMachine( )).getRunManifestLocation( ),
BundleImageManifest.INSTANCE,
ImageConfiguration.getInstance( ).getMaxManifestSizeBytes( ) ),
partition.getNodeCertificate( ).getPublicKey( ),
reservationId, true); |
<<<<<<<
@Test
public void testIssue457() throws Exception {
/*
a="a\
b"
$a
*/
testTokenization("a=\"a\\b_\"\n$a", ASSIGNMENT_WORD, EQ, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_FEED, VARIABLE);
/*
a="a"\
"b_"
$a
*/
testTokenization("a=\"a\"\\\n\"b_\"\n$a", ASSIGNMENT_WORD, EQ, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_CONTINUATION, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_FEED, VARIABLE);
}
@Test
public void testIssue469() throws Exception {
testTokenization(BashVersion.Bash_v3, "(a) |& a b", LEFT_PAREN, WORD, RIGHT_PAREN, WHITESPACE, PIPE, AMP, WHITESPACE, WORD, WHITESPACE, WORD);
testTokenization(BashVersion.Bash_v4, "(a) |& a b", LEFT_PAREN, WORD, RIGHT_PAREN, WHITESPACE, PIPE_AMP, WHITESPACE, WORD, WHITESPACE, WORD);
}
@Test
public void testIssue473() throws Exception {
// `cat <<EOF
// X
// EOF`
testTokenization("`cat <<EOF\nX\nEOF`", BACKQUOTE, WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, BACKQUOTE);
// $(cat <<EOF
// X
// EOF
// )
testTokenization("$(cat <<EOF\nX\nEOF\n)", DOLLAR, LEFT_PAREN, WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED, RIGHT_PAREN);
}
=======
@Test
public void testIssue474() throws Exception {
//less-than should be replaced with a better token in the lexer
testTokenization("cat <<EOF;\nX\nEOF", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, SEMI, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
}
>>>>>>>
@Test
public void testIssue457() throws Exception {
/*
a="a\
b"
$a
*/
testTokenization("a=\"a\\b_\"\n$a", ASSIGNMENT_WORD, EQ, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_FEED, VARIABLE);
/*
a="a"\
"b_"
$a
*/
testTokenization("a=\"a\"\\\n\"b_\"\n$a", ASSIGNMENT_WORD, EQ, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_CONTINUATION, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_FEED, VARIABLE);
}
@Test
public void testIssue469() throws Exception {
testTokenization(BashVersion.Bash_v3, "(a) |& a b", LEFT_PAREN, WORD, RIGHT_PAREN, WHITESPACE, PIPE, AMP, WHITESPACE, WORD, WHITESPACE, WORD);
testTokenization(BashVersion.Bash_v4, "(a) |& a b", LEFT_PAREN, WORD, RIGHT_PAREN, WHITESPACE, PIPE_AMP, WHITESPACE, WORD, WHITESPACE, WORD);
}
@Test
public void testIssue473() throws Exception {
// `cat <<EOF
// X
// EOF`
testTokenization("`cat <<EOF\nX\nEOF`", BACKQUOTE, WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, BACKQUOTE);
// $(cat <<EOF
// X
// EOF
// )
testTokenization("$(cat <<EOF\nX\nEOF\n)", DOLLAR, LEFT_PAREN, WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED, RIGHT_PAREN);
}
@Test
public void testIssue474() throws Exception {
//less-than should be replaced with a better token in the lexer
testTokenization("cat <<EOF;\nX\nEOF", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, SEMI, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
} |
<<<<<<<
private static final ChannelHandler bindingHandler = new BindingHandler( );
private static final ChannelHandler internalImpersonationHandler = new InternalImpersonationHandler();
=======
private static final ChannelHandler bindingHandler = new BindingHandler( BindingManager.getDefaultBinding( ) );
>>>>>>>
private static final ChannelHandler bindingHandler = new BindingHandler( BindingManager.getDefaultBinding( ) );
private static final ChannelHandler internalImpersonationHandler = new InternalImpersonationHandler();
<<<<<<<
public static ChannelHandler internalImpersonationHandler() {
return internalImpersonationHandler;
}
=======
>>>>>>>
public static ChannelHandler internalImpersonationHandler() {
return internalImpersonationHandler;
} |
<<<<<<<
@Test
public void testIssue457() throws Exception {
/*
a="a\
b"
$a
*/
testTokenization("a=\"a\\b_\"\n$a", ASSIGNMENT_WORD, EQ, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_FEED, VARIABLE);
/*
a="a"\
"b_"
$a
*/
testTokenization("a=\"a\"\\\n\"b_\"\n$a", ASSIGNMENT_WORD, EQ, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_CONTINUATION, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_FEED, VARIABLE);
}
=======
@Test
public void testIssue469() throws Exception {
testTokenization(BashVersion.Bash_v3, "(a) |& a b", LEFT_PAREN, WORD, RIGHT_PAREN, WHITESPACE, PIPE, AMP, WHITESPACE, WORD, WHITESPACE, WORD);
testTokenization(BashVersion.Bash_v4, "(a) |& a b", LEFT_PAREN, WORD, RIGHT_PAREN, WHITESPACE, PIPE_AMP, WHITESPACE, WORD, WHITESPACE, WORD);
}
>>>>>>>
@Test
public void testIssue457() throws Exception {
/*
a="a\
b"
$a
*/
testTokenization("a=\"a\\b_\"\n$a", ASSIGNMENT_WORD, EQ, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_FEED, VARIABLE);
/*
a="a"\
"b_"
$a
*/
testTokenization("a=\"a\"\\\n\"b_\"\n$a", ASSIGNMENT_WORD, EQ, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_CONTINUATION, STRING_BEGIN, STRING_CONTENT, STRING_END, LINE_FEED, VARIABLE);
}
@Test
public void testIssue469() throws Exception {
testTokenization(BashVersion.Bash_v3, "(a) |& a b", LEFT_PAREN, WORD, RIGHT_PAREN, WHITESPACE, PIPE, AMP, WHITESPACE, WORD, WHITESPACE, WORD);
testTokenization(BashVersion.Bash_v4, "(a) |& a b", LEFT_PAREN, WORD, RIGHT_PAREN, WHITESPACE, PIPE_AMP, WHITESPACE, WORD, WHITESPACE, WORD);
} |
<<<<<<<
import com.eucalyptus.component.annotation.AdminService;
import com.eucalyptus.component.annotation.FaultLogPrefix;
import com.eucalyptus.component.annotation.GenerateKeys;
import com.eucalyptus.component.annotation.InternalService;
import com.eucalyptus.component.annotation.Partition;
import com.eucalyptus.component.annotation.PolicyVendor;
import com.eucalyptus.component.annotation.PublicService;
=======
import com.eucalyptus.component.annotation.AwsServiceName;
>>>>>>>
import com.eucalyptus.component.annotation.AdminService;
import com.eucalyptus.component.annotation.FaultLogPrefix;
import com.eucalyptus.component.annotation.GenerateKeys;
import com.eucalyptus.component.annotation.InternalService;
import com.eucalyptus.component.annotation.Partition;
import com.eucalyptus.component.annotation.PolicyVendor;
import com.eucalyptus.component.annotation.PublicService;
import com.eucalyptus.component.annotation.AwsServiceName; |
<<<<<<<
=======
public boolean isHeredocEnd(String text) {
return heredocLexingState.isNextHeredocMarker(text);
}
@Override
public boolean isHeredocEvaluating() {
return heredocLexingState.isExpectingEvaluatingHeredoc();
}
@Override
public void pushExpectedHeredocMarker(CharSequence expectedHeredocMarker) {
this.heredocLexingState.pushHeredocMarker(expectedHeredocMarker.toString());
}
@Override
public void popHeredocMarker(CharSequence marker) {
heredocLexingState.popHeredocMarker(marker.toString());
}
@Override
public boolean isHeredocMarkersEmpty() {
return heredocLexingState.isEmpty();
}
@Override
public boolean isStartNewArithExpression() {
return startNewArithExpression;
}
@Override
public void setStartNewArithExpression(boolean startNewArithExpression) {
this.startNewArithExpression = startNewArithExpression;
}
@Override
public boolean isExpectArithExpression() {
return expectArithExpression;
}
@Override
public void setExpectArithExpression(boolean expectArithExpression) {
this.expectArithExpression = expectArithExpression;
}
@Override
>>>>>>>
public boolean isHeredocEnd(String text) {
return heredocLexingState.isNextHeredocMarker(text);
}
@Override
public boolean isHeredocEvaluating() {
return heredocLexingState.isExpectingEvaluatingHeredoc();
}
@Override
public void pushExpectedHeredocMarker(CharSequence expectedHeredocMarker) {
this.heredocLexingState.pushHeredocMarker(expectedHeredocMarker.toString());
}
@Override
public void popHeredocMarker(CharSequence marker) {
heredocLexingState.popHeredocMarker(marker.toString());
}
@Override
public boolean isHeredocMarkersEmpty() {
return heredocLexingState.isEmpty();
}
@Override |
<<<<<<<
import com.eucalyptus.auth.Accounts;
import com.eucalyptus.auth.AuthException;
import com.eucalyptus.auth.Permissions;
import com.eucalyptus.auth.crypto.Digest;
import com.eucalyptus.auth.policy.PolicySpec;
import com.eucalyptus.auth.principal.Account;
=======
>>>>>>>
import com.eucalyptus.auth.Accounts;
import com.eucalyptus.auth.AuthException;
import com.eucalyptus.auth.Permissions;
import com.eucalyptus.auth.crypto.Digest;
import com.eucalyptus.auth.policy.PolicySpec;
import com.eucalyptus.auth.principal.Account;
<<<<<<<
import com.eucalyptus.context.Context;
import com.eucalyptus.context.Contexts;
=======
import com.eucalyptus.crypto.Digest;
>>>>>>>
import com.eucalyptus.context.Context;
import com.eucalyptus.context.Contexts;
import com.eucalyptus.crypto.Digest; |
<<<<<<<
public void testIssue89() throws Exception {
testTokenization("function a {\n}function", FUNCTION_KEYWORD, WHITESPACE, WORD, WHITESPACE, LEFT_CURLY, LINE_FEED, RIGHT_CURLY, FUNCTION_KEYWORD);
}
@Test
=======
public void testIssue327() throws Exception {
testTokenization("<< EOF\n\\$(a)\nEOF", HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
}
@Test
>>>>>>>
public void testIssue89() throws Exception {
testTokenization("function a {\n}function", FUNCTION_KEYWORD, WHITESPACE, WORD, WHITESPACE, LEFT_CURLY, LINE_FEED, RIGHT_CURLY, FUNCTION_KEYWORD);
}
@Test
public void testIssue327() throws Exception {
testTokenization("<< EOF\n\\$(a)\nEOF", HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
}
@Test |
<<<<<<<
import com.eucalyptus.auth.Groups;
import com.eucalyptus.auth.principal.Authorization;
import com.eucalyptus.auth.principal.AvailabilityZonePermission;
import com.eucalyptus.auth.principal.Group;
import com.eucalyptus.bootstrap.Component;
=======
>>>>>>> |
<<<<<<<
Collection<MetricStatistics> metrics;
if (namespace.startsWith("AWS/")) {
metrics = MetricManager.getMetricStatistics(ownerFullName.getAccountNumber(), ownerFullName.getUserId(), metricName, namespace, dimensionMap, MetricType.System, units, startTime, endTime, period);
} else {
metrics = MetricManager.getMetricStatistics(ownerFullName.getAccountNumber(), ownerFullName.getUserId(), metricName, namespace, dimensionMap, MetricType.Custom, units, startTime, endTime, period);
}
=======
Collection<MetricStatistics> metrics = MetricManager.getMetricStatistics(ownerFullName.getAccountNumber(), metricName, namespace, dimensionMap, MetricType.Custom, units, startTime, endTime, period);
>>>>>>>
Collection<MetricStatistics> metrics;
if (namespace.startsWith("AWS/")) {
metrics = MetricManager.getMetricStatistics(ownerFullName.getAccountNumber(), metricName, namespace, dimensionMap, MetricType.System, units, startTime, endTime, period);
} else {
metrics = MetricManager.getMetricStatistics(ownerFullName.getAccountNumber(), metricName, namespace, dimensionMap, MetricType.Custom, units, startTime, endTime, period);
} |
<<<<<<<
// Update the instance info
if (action.properties.getInstanceId() != null) {
EC2Helper.refreshInstanceAttributes(action.getStackEntity(), action.properties.getInstanceId(), action.info.getEffectiveUserId(), action.getStackEntity().getUpdateVersion());
}
=======
>>>>>>> |
<<<<<<<
import java.util.Date;
import java.util.NoSuchElementException;
=======
>>>>>>>
import java.util.NoSuchElementException;
import java.util.Date;
<<<<<<<
import com.eucalyptus.auth.CredentialProvider;
import com.eucalyptus.auth.NoSuchUserException;
import com.eucalyptus.auth.User;
import com.eucalyptus.cluster.Cluster;
import com.eucalyptus.cluster.Clusters;
import com.eucalyptus.cluster.callback.BundleCallback;
import com.eucalyptus.cluster.callback.CancelBundleCallback;
import com.eucalyptus.cluster.callback.PasswordDataCallback;
=======
>>>>>>>
import com.eucalyptus.cluster.Cluster;
import com.eucalyptus.cluster.Clusters;
import com.eucalyptus.cluster.callback.BundleCallback;
import com.eucalyptus.cluster.callback.CancelBundleCallback;
import com.eucalyptus.cluster.callback.PasswordDataCallback;
<<<<<<<
import com.eucalyptus.ws.util.Messaging;
import edu.ucsb.eucalyptus.cloud.VmAllocationInfo;
import edu.ucsb.eucalyptus.cloud.cluster.VmInstance;
import edu.ucsb.eucalyptus.cloud.cluster.VmInstances;
import edu.ucsb.eucalyptus.cloud.entities.SystemConfiguration;
import edu.ucsb.eucalyptus.constants.EventType;
import edu.ucsb.eucalyptus.constants.VmState;
import edu.ucsb.eucalyptus.msgs.BundleInstanceResponseType;
import edu.ucsb.eucalyptus.msgs.BundleInstanceType;
import edu.ucsb.eucalyptus.msgs.BundleTask;
import edu.ucsb.eucalyptus.msgs.CancelBundleTaskResponseType;
import edu.ucsb.eucalyptus.msgs.CancelBundleTaskType;
import edu.ucsb.eucalyptus.msgs.DescribeBundleTasksResponseType;
import edu.ucsb.eucalyptus.msgs.DescribeBundleTasksType;
import edu.ucsb.eucalyptus.msgs.DescribeInstancesResponseType;
import edu.ucsb.eucalyptus.msgs.DescribeInstancesType;
import edu.ucsb.eucalyptus.msgs.EucalyptusErrorMessageType;
import edu.ucsb.eucalyptus.msgs.EventRecord;
import edu.ucsb.eucalyptus.msgs.GetConsoleOutputType;
import edu.ucsb.eucalyptus.msgs.GetPasswordDataResponseType;
import edu.ucsb.eucalyptus.msgs.GetPasswordDataType;
import edu.ucsb.eucalyptus.msgs.RebootInstancesResponseType;
import edu.ucsb.eucalyptus.msgs.RebootInstancesType;
import edu.ucsb.eucalyptus.msgs.TerminateInstancesResponseType;
import edu.ucsb.eucalyptus.msgs.TerminateInstancesType;
=======
import edu.ucsb.eucalyptus.cloud.VmAllocationInfo;
import edu.ucsb.eucalyptus.msgs.DescribeInstancesResponseType;
import edu.ucsb.eucalyptus.msgs.DescribeInstancesType;
import edu.ucsb.eucalyptus.msgs.GetConsoleOutputType;
import edu.ucsb.eucalyptus.msgs.RebootInstancesResponseType;
import edu.ucsb.eucalyptus.msgs.RebootInstancesType;
import edu.ucsb.eucalyptus.msgs.TerminateInstancesResponseType;
import edu.ucsb.eucalyptus.msgs.TerminateInstancesType;
>>>>>>>
import com.eucalyptus.ws.util.Messaging;
import com.eucalyptus.auth.Users;
import com.eucalyptus.auth.principal.User;
import com.eucalyptus.auth.NoSuchUserException;
import edu.ucsb.eucalyptus.cloud.VmAllocationInfo;
import edu.ucsb.eucalyptus.cloud.cluster.VmInstance;
import edu.ucsb.eucalyptus.cloud.cluster.VmInstances;
import edu.ucsb.eucalyptus.cloud.entities.SystemConfiguration;
import com.eucalyptus.records.EventType;
import com.eucalyptus.vm.VmState;
import edu.ucsb.eucalyptus.msgs.BundleInstanceResponseType;
import edu.ucsb.eucalyptus.msgs.BundleInstanceType;
import edu.ucsb.eucalyptus.msgs.BundleTask;
import edu.ucsb.eucalyptus.msgs.CancelBundleTaskResponseType;
import edu.ucsb.eucalyptus.msgs.CancelBundleTaskType;
import edu.ucsb.eucalyptus.msgs.DescribeBundleTasksResponseType;
import edu.ucsb.eucalyptus.msgs.DescribeBundleTasksType;
import edu.ucsb.eucalyptus.msgs.DescribeInstancesResponseType;
import edu.ucsb.eucalyptus.msgs.DescribeInstancesType;
import edu.ucsb.eucalyptus.msgs.EucalyptusErrorMessageType;
import edu.ucsb.eucalyptus.msgs.EventRecord;
import edu.ucsb.eucalyptus.msgs.GetConsoleOutputType;
import edu.ucsb.eucalyptus.msgs.GetPasswordDataResponseType;
import edu.ucsb.eucalyptus.msgs.GetPasswordDataType;
import edu.ucsb.eucalyptus.msgs.RebootInstancesResponseType;
import edu.ucsb.eucalyptus.msgs.RebootInstancesType;
import edu.ucsb.eucalyptus.msgs.TerminateInstancesResponseType;
import edu.ucsb.eucalyptus.msgs.TerminateInstancesType; |
<<<<<<<
=======
import com.eucalyptus.auth.Groups;
import com.eucalyptus.auth.Users;
import com.eucalyptus.auth.crypto.Crypto;
>>>>>>>
import com.eucalyptus.auth.crypto.Crypto; |
<<<<<<<
sysConf.getDefaultKernel(),
sysConf.getDefaultRamdisk(),
sysConf.getDnsDomain(),
sysConf.getNameserver(),
sysConf.getNameserverAddress());
=======
sysConf.getDefaultKernel(), sysConf.getDefaultRamdisk(),
sysConf.getMaxUserPublicAddresses(), sysConf.isDoDynamicPublicAddresses(), sysConf.getSystemReservedPublicAddresses() );
>>>>>>>
sysConf.getDefaultKernel(),
sysConf.getDefaultRamdisk(),
sysConf.getDefaultKernel(), sysConf.getDefaultRamdisk(),
sysConf.getMaxUserPublicAddresses(), sysConf.isDoDynamicPublicAddresses(), sysConf.getSystemReservedPublicAddresses(),
sysConf.getDnsDomain(),
sysConf.getNameserver(),
sysConf.getNameserverAddress());
<<<<<<<
if(sysConf.getDnsDomain() == null) {
sysConf.setDnsDomain(DNSProperties.DOMAIN);
}
if(sysConf.getNameserver() == null) {
sysConf.setNameserver(DNSProperties.NS_HOST);
}
if(sysConf.getNameserverAddress() == null) {
sysConf.setNameserverAddress(DNSProperties.NS_IP);
}
=======
if( sysConf.getMaxUserPublicAddresses() == null ) {
sysConf.setMaxUserPublicAddresses( 5 );
}
if( sysConf.isDoDynamicPublicAddresses() == null ) {
sysConf.setDoDynamicPublicAddresses( true );
}
if( sysConf.getSystemReservedPublicAddresses() == null ) {
sysConf.setSystemReservedPublicAddresses( 10 );
}
>>>>>>>
if(sysConf.getDnsDomain() == null) {
sysConf.setDnsDomain(DNSProperties.DOMAIN);
}
if(sysConf.getNameserver() == null) {
sysConf.setNameserver(DNSProperties.NS_HOST);
}
if(sysConf.getNameserverAddress() == null) {
sysConf.setNameserverAddress(DNSProperties.NS_IP);
if( sysConf.getMaxUserPublicAddresses() == null ) {
sysConf.setMaxUserPublicAddresses( 5 );
}
if( sysConf.isDoDynamicPublicAddresses() == null ) {
sysConf.setDoDynamicPublicAddresses( true );
}
if( sysConf.getSystemReservedPublicAddresses() == null ) {
sysConf.setSystemReservedPublicAddresses( 10 );
}
<<<<<<<
sysConf.setDnsDomain(systemConfig.getDnsDomain());
sysConf.setNameserver(systemConfig.getNameserver());
sysConf.setNameserverAddress(systemConfig.getNameserverAddress());
=======
sysConf.setMaxUserPublicAddresses( systemConfig.getMaxUserPublicAddresses() );
sysConf.setDoDynamicPublicAddresses( systemConfig.isDoDynamicPublicAddresses() );
sysConf.setSystemReservedPublicAddresses( systemConfig.getSystemReservedPublicAddresses() );
>>>>>>>
sysConf.setDnsDomain(systemConfig.getDnsDomain());
sysConf.setNameserver(systemConfig.getNameserver());
sysConf.setNameserverAddress(systemConfig.getNameserverAddress());
sysConf.setMaxUserPublicAddresses( systemConfig.getMaxUserPublicAddresses() );
sysConf.setDoDynamicPublicAddresses( systemConfig.isDoDynamicPublicAddresses() );
sysConf.setSystemReservedPublicAddresses( systemConfig.getSystemReservedPublicAddresses() );
<<<<<<<
systemConfig.getStorageVolumesPath(),
systemConfig.getDnsDomain(),
systemConfig.getNameserver(),
systemConfig.getNameserverAddress()));
=======
systemConfig.getStorageVolumesPath(),
systemConfig.getMaxUserPublicAddresses(),
systemConfig.isDoDynamicPublicAddresses(),
systemConfig.getSystemReservedPublicAddresses()));
>>>>>>>
systemConfig.getStorageVolumesPath(),
systemConfig.getMaxUserPublicAddresses(),
systemConfig.isDoDynamicPublicAddresses(),
systemConfig.getSystemReservedPublicAddresses()),
systemConfig.getDnsDomain(),
systemConfig.getNameserver(),
systemConfig.getNameserverAddress())); |
<<<<<<<
import com.eucalyptus.bootstrap.Component;
=======
import com.eucalyptus.component.auth.SystemCredentialProvider;
import com.eucalyptus.component.id.Storage;
>>>>>>>
import com.eucalyptus.component.id.Storage; |
<<<<<<<
* Copyright 2009-2016 Eucalyptus Systems, Inc.
=======
* Copyright 2008 Regents of the University of California
* Copyright 2009-2015 Ent. Services Development Corporation LP
>>>>>>>
* Copyright 2008 Regents of the University of California
* Copyright 2009-2016 Ent. Services Development Corporation LP |
<<<<<<<
* @param baseManifest the base manifest
=======
>>>>>>>
* @param baseManifest the base manifest
<<<<<<<
=======
* @param urlForNc indicates if urs are constructed for NC use
* @param baseManifestLocation location of the base manifest file
* @param manifestType what kind of manifest
>>>>>>>
* @param urlForNc indicates if urs are constructed for NC use
<<<<<<<
final String manifestName, int expirationHours) throws DownloadManifestException {
try ( final EucaS3Client s3Client = EucaS3ClientFactory.getEucaS3Client(Accounts.lookupAwsExecReadAdmin(true)) ) {
//prepare to do pre-signed urls
=======
final String manifestName, int expirationHours, boolean urlForNc) throws DownloadManifestException {
try {
//prepare to do pre-signed urls
EucaS3Client s3Client = EucaS3ClientFactory.getEucaS3Client(getDownloadManifestS3User());
if (!urlForNc)
s3Client.refreshEndpoint(true);
>>>>>>>
final String manifestName, int expirationHours, boolean urlForNc) throws DownloadManifestException {
try ( final EucaS3Client s3Client = EucaS3ClientFactory.getEucaS3Client(Accounts.lookupAwsExecReadAdmin(true)) ) {
//prepare to do pre-signed urls
if (!urlForNc)
s3Client.refreshEndpoint(true); |
<<<<<<<
=======
import com.eucalyptus.component.Components;
import com.eucalyptus.component.Dispatcher;
>>>>>>>
import com.eucalyptus.component.Components;
<<<<<<<
ServiceDispatcher.lookup( Component.storage, sc.getHostName( ) ).send( new DetachStorageVolumeType(
cluster.getNode( vm.getServiceTag( ) ).getIqn( ),
volume.getVolumeId( ) ) );
vm.getVolumes( ).remove( volume );
=======
vm.removeVolumeAttachment( arg0.getVolumeId( ) );
Dispatcher scDispatcher = ServiceDispatcher.lookup( Components.lookup("storage"), sc.getHostName( ) );
scDispatcher.send( new DetachStorageVolumeType( cluster.getNode( vm.getServiceTag( ) ).getIqn( ), arg0.getVolumeId( ) ) );
return true;
>>>>>>>
ServiceDispatcher.lookup( Components.lookup("storage"), sc.getHostName( ) ).send( new DetachStorageVolumeType(
cluster.getNode( vm.getServiceTag( ) ).getIqn( ),
volume.getVolumeId( ) ) );
vm.getVolumes( ).remove( volume ); |
<<<<<<<
=======
import java.util.Map;
import java.util.Objects;
>>>>>>>
import java.util.Map;
import java.util.Objects;
<<<<<<<
=======
public static List<Account> listAccountsByStatus( final User.RegistrationStatus status ) throws AuthException {
return Accounts.getAccountProvider( ).listAccountsByStatus( status );
}
public static boolean isSystemAccount( String accountName ) {
return
Account.SYSTEM_ACCOUNT.equals( accountName ) ||
Objects.toString( accountName, "" ).startsWith( Account.SYSTEM_ACCOUNT_PREFIX );
}
public static boolean isSystemAccount( Account account ) {
return isSystemAccount( account == null ? null : account.getName( ) );
}
>>>>>>>
public static boolean isSystemAccount( String accountName ) {
return
Account.SYSTEM_ACCOUNT.equals( accountName ) ||
Objects.toString( accountName, "" ).startsWith( Account.SYSTEM_ACCOUNT_PREFIX );
}
public static boolean isSystemAccount( Account account ) {
return isSystemAccount( account == null ? null : account.getName( ) );
} |
<<<<<<<
import com.eucalyptus.auth.SystemCredentialProvider;
=======
import com.eucalyptus.auth.NoSuchUserException;
import com.eucalyptus.component.auth.SystemCredentialProvider;
>>>>>>>
import com.eucalyptus.component.auth.SystemCredentialProvider;
<<<<<<<
import com.eucalyptus.auth.SystemCredentialProvider;
=======
import com.eucalyptus.auth.NoSuchUserException;
import com.eucalyptus.component.auth.SystemCredentialProvider;
>>>>>>>
import com.eucalyptus.component.auth.SystemCredentialProvider; |
<<<<<<<
import com.google.common.base.Function;
=======
import com.eucalyptus.vm.VmVolumeAttachment;
>>>>>>>
import com.google.common.base.Function;
import com.eucalyptus.vm.VmVolumeAttachment; |
<<<<<<<
import edu.ucsb.eucalyptus.msgs.DescribeRegionsResponseType;
import edu.ucsb.eucalyptus.msgs.DescribeRegionsType;
=======
import edu.ucsb.eucalyptus.msgs.DescribeBundleTasksType;
import edu.ucsb.eucalyptus.msgs.DescribeRegionsResponseType;
import edu.ucsb.eucalyptus.msgs.DescribeRegionsType;
>>>>>>>
import edu.ucsb.eucalyptus.msgs.DescribeBundleTasksType;
import edu.ucsb.eucalyptus.msgs.DescribeRegionsResponseType;
import edu.ucsb.eucalyptus.msgs.DescribeRegionsType;
<<<<<<<
import edu.ucsb.eucalyptus.msgs.RegionInfoType;
=======
import edu.ucsb.eucalyptus.msgs.RegionInfoType;
import edu.ucsb.eucalyptus.msgs.UnimplementedMessage;
>>>>>>>
import edu.ucsb.eucalyptus.msgs.RegionInfoType;
import edu.ucsb.eucalyptus.msgs.UnimplementedMessage;
<<<<<<<
"CreateVolume", "CreateSnapshot", "DeleteVolume", "DeleteSnapshot",
"DescribeVolumes", "DescribeSnapshots", "AttachVolume", "DetachVolume",
"DescribeRegions",
"BundleInstance","DescribeBundleTasks","CancelBundleTask",
"DescribeReservedInstances","DescribeReservedInstancesOfferings","PurchaseReservedInstancesOffering" } )
public EucalyptusMessage handle( EucalyptusMessage msg ) {
if( msg instanceof DescribeRegionsType ) {
DescribeRegionsResponseType reply = (DescribeRegionsResponseType ) msg.getReply();
try {
SystemConfiguration config = EucalyptusProperties.getSystemConfiguration();
reply.getRegionInfo().add(new RegionInfoType( "Eucalyptus", config.getStorageUrl().replaceAll( "Walrus", "Eucalyptus" )));
reply.getRegionInfo().add(new RegionInfoType( "Walrus", config.getStorageUrl()));
} catch ( EucalyptusCloudException e ) {}
return reply;
}
LOG.info( EventRecord.create( this.getClass().getSimpleName(), msg.getUserId(), msg.getCorrelationId(), EventType.MSG_RECEIVED, msg.getClass().getSimpleName() ) );
=======
"CreateVolume", "CreateSnapshot", "DeleteVolume", "DeleteSnapshot", "DescribeVolumes","DescribeSnapshots", "AttachVolume","DetachVolume",
"DescribeRegions", "DescribeBundleTasks",
"DescribeReservedInstancesOfferings", "DescribeReservedInstances" } )
public EucalyptusMessage handle( EucalyptusMessage msg )
{
if( msg instanceof UnimplementedMessage ) {
return msg.getReply();
}
if( msg instanceof DescribeRegionsType ) {
DescribeRegionsResponseType reply = ( DescribeRegionsResponseType ) msg.getReply();
try {
SystemConfiguration config = EucalyptusProperties.getSystemConfiguration();
reply.getRegionInfo().add(new RegionInfoType( "Eucalyptus", config.getStorageUrl().replaceAll( "Walrus", "Eucalyptus" )));
reply.getRegionInfo().add(new RegionInfoType( "Walrus", config.getStorageUrl()));
} catch ( EucalyptusCloudException e ) {}
return reply;
} else if ( msg instanceof DescribeBundleTasksType ) {
return msg.getReply();
}
LOG.info( EventRecord.create( this.getClass().getSimpleName(), msg.getUserId(), msg.getCorrelationId(), EventType.MSG_RECEIVED, msg.getClass().getSimpleName() )) ;
>>>>>>>
"CreateVolume", "CreateSnapshot", "DeleteVolume", "DeleteSnapshot",
"DescribeVolumes", "DescribeSnapshots", "AttachVolume", "DetachVolume",
"DescribeRegions",
"BundleInstance","DescribeBundleTasks","CancelBundleTask",
"DescribeReservedInstances","DescribeReservedInstancesOfferings","PurchaseReservedInstancesOffering" } )
public EucalyptusMessage handle( EucalyptusMessage msg )
{
if( msg instanceof UnimplementedMessage ) {
return msg.getReply();
}
if( msg instanceof DescribeRegionsType ) {
DescribeRegionsResponseType reply = ( DescribeRegionsResponseType ) msg.getReply();
try {
SystemConfiguration config = EucalyptusProperties.getSystemConfiguration();
reply.getRegionInfo().add(new RegionInfoType( "Eucalyptus", config.getStorageUrl().replaceAll( "Walrus", "Eucalyptus" )));
reply.getRegionInfo().add(new RegionInfoType( "Walrus", config.getStorageUrl()));
} catch ( EucalyptusCloudException e ) {}
return reply;
} else if ( msg instanceof DescribeBundleTasksType ) {
return msg.getReply();
}
LOG.info( EventRecord.create( this.getClass().getSimpleName(), msg.getUserId(), msg.getCorrelationId(), EventType.MSG_RECEIVED, msg.getClass().getSimpleName() ) ); |
<<<<<<<
public void testIssue266() throws Exception {
testTokenization("${#}", DOLLAR, LEFT_CURLY, PARAM_EXPANSION_OP_HASH, RIGHT_CURLY);
}
@Test
=======
public void testIssue270() throws Exception {
//heredoc without evaluation
testTokenization("cat <<'EOF'\n" +
" echo ${counter}\n" +
"EOF", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
//heredoc with evaluation
testTokenization("cat <<EOF\n" +
" echo ${counter}\n" +
"EOF", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, DOLLAR, LEFT_CURLY, WORD, RIGHT_CURLY, HEREDOC_CONTENT, HEREDOC_MARKER_END);
}
@Test
>>>>>>>
public void testIssue266() throws Exception {
testTokenization("${#}", DOLLAR, LEFT_CURLY, PARAM_EXPANSION_OP_HASH, RIGHT_CURLY);
}
@Test
public void testIssue270() throws Exception {
//heredoc without evaluation
testTokenization("cat <<'EOF'\n" +
" echo ${counter}\n" +
"EOF", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
//heredoc with evaluation
testTokenization("cat <<EOF\n" +
" echo ${counter}\n" +
"EOF", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, DOLLAR, LEFT_CURLY, WORD, RIGHT_CURLY, HEREDOC_CONTENT, HEREDOC_MARKER_END);
}
@Test |
<<<<<<<
import com.eucalyptus.tags.Filter;
import com.eucalyptus.tags.FilterSupport;
import com.eucalyptus.tags.Filters;
import com.eucalyptus.util.EucalyptusCloudException;
=======
import com.eucalyptus.util.async.AsyncRequests;
>>>>>>>
import com.eucalyptus.tags.Filter;
import com.eucalyptus.tags.FilterSupport;
import com.eucalyptus.tags.Filters;
import com.eucalyptus.util.EucalyptusCloudException;
import com.eucalyptus.util.async.AsyncRequests;
<<<<<<<
public DescribeAvailabilityZonesResponseType DescribeAvailabilityZones( DescribeAvailabilityZonesType request ) throws EucalyptusCloudException {
final DescribeAvailabilityZonesResponseType reply = ( DescribeAvailabilityZonesResponseType ) request.getReply( );
final List<String> args = request.getAvailabilityZoneSet( );
final Filter filter = Filters.generate( request.getFilterSet(), Cluster.class );
=======
public EvacuateNodeResponseType evacuateNode( EvacuateNodeType request ) {
EvacuateNodeResponseType reply = request.getReply( );
String serviceTag = request.getServiceTag( );
for ( ServiceConfiguration c : Topology.enabledServices( ClusterController.class ) ) {
if ( Clusters.lookup( c ).getNodeMap( ).containsKey( serviceTag ) ) {
try {
//0. gate this cluster
//1. describe resources
//2. describe nodes
//3. find all vms running on NC@serviceTag
//4. authorize all NCs to attach volumes which are attached to vms from #3
//5. send the operation down
AsyncRequests.sendSync( c, request );
//5.a. when the above returns that means that:
// - the request has been accepted and can be executed
// - the other state interrogating operations (DescribeResources) will reflect the resources committed to the evacuation.
//6. describe resources
//7. wait to determine migration schedule
//8. wait for migration to complete
//8.a. migration schedule will say where vms from #3 are moving
//9. authorize volume attachments only for the NCs which now host the vms from #3
//10. ungate the cluster
return reply.markWinning( );
} catch ( Exception ex ) {
LOG.error( ex , ex );
}
}
}
return reply.markFailed( );
}
public DescribeAvailabilityZonesResponseType DescribeAvailabilityZones( DescribeAvailabilityZonesType request ) {
DescribeAvailabilityZonesResponseType reply = ( DescribeAvailabilityZonesResponseType ) request.getReply( );
List<String> args = request.getAvailabilityZoneSet( );
>>>>>>>
public EvacuateNodeResponseType evacuateNode( EvacuateNodeType request ) {
EvacuateNodeResponseType reply = request.getReply( );
String serviceTag = request.getServiceTag( );
for ( ServiceConfiguration c : Topology.enabledServices( ClusterController.class ) ) {
if ( Clusters.lookup( c ).getNodeMap( ).containsKey( serviceTag ) ) {
try {
//0. gate this cluster
//1. describe resources
//2. describe nodes
//3. find all vms running on NC@serviceTag
//4. authorize all NCs to attach volumes which are attached to vms from #3
//5. send the operation down
AsyncRequests.sendSync( c, request );
//5.a. when the above returns that means that:
// - the request has been accepted and can be executed
// - the other state interrogating operations (DescribeResources) will reflect the resources committed to the evacuation.
//6. describe resources
//7. wait to determine migration schedule
//8. wait for migration to complete
//8.a. migration schedule will say where vms from #3 are moving
//9. authorize volume attachments only for the NCs which now host the vms from #3
//10. ungate the cluster
return reply.markWinning( );
} catch ( Exception ex ) {
LOG.error( ex , ex );
}
}
}
return reply.markFailed( );
}
public DescribeAvailabilityZonesResponseType DescribeAvailabilityZones( DescribeAvailabilityZonesType request ) throws EucalyptusCloudException {
final DescribeAvailabilityZonesResponseType reply = ( DescribeAvailabilityZonesResponseType ) request.getReply( );
final List<String> args = request.getAvailabilityZoneSet( );
final Filter filter = Filters.generate( request.getFilterSet(), Cluster.class ); |
<<<<<<<
public static final String EUCA_ROOT_WRAPPER = "/usr/lib/eucalyptus/euca_rootwrap";
public static final String blockSize = "1M";
=======
public static String SAN_HOST = "sanHost";
public static String SAN_USERNAME = "sanUser";
public static String SAN_PASSWORD = "sanPassword";
public static String DAS_DEVICE = "/dev/blockdev";
>>>>>>>
public static final String EUCA_ROOT_WRAPPER = "/usr/lib/eucalyptus/euca_rootwrap";
public static final String blockSize = "1M";
public static String SAN_HOST = "sanHost";
public static String SAN_USERNAME = "sanUser";
public static String SAN_PASSWORD = "sanPassword";
public static String DAS_DEVICE = "/dev/blockdev"; |
<<<<<<<
import java.io.OutputStream;
import java.util.List;
import javax.ws.rs.core.MediaType;
import org.apache.syncope.common.types.AuditElements.Category;
import org.apache.syncope.common.types.AuditElements.Result;
import org.apache.syncope.common.types.AuditElements.WorkflowSubCategory;
import org.apache.syncope.core.audit.AuditManager;
=======
import java.lang.reflect.Method;
import org.apache.syncope.common.to.WorkflowDefinitionTO;
>>>>>>>
import java.io.OutputStream;
import java.lang.reflect.Method;
import javax.ws.rs.core.MediaType;
import org.apache.syncope.common.AbstractBaseBean;
<<<<<<<
private void exportDefinition(
final WorkflowAdapter adapter, final WorkflowDefinitionFormat format, final OutputStream os)
throws WorkflowException {
adapter.exportDefinition(format, os);
auditManager.audit(Category.workflow, WorkflowSubCategory.exportDefinition, Result.success,
"Successfully exported workflow definition");
}
private WorkflowDefinitionFormat getFormat(final MediaType format) {
return format.equals(MediaType.APPLICATION_JSON_TYPE)
? WorkflowDefinitionFormat.JSON
: WorkflowDefinitionFormat.XML;
=======
private WorkflowDefinitionTO getDefinition(final WorkflowAdapter adapter) throws WorkflowException {
WorkflowDefinitionTO result = adapter.getDefinition();
return result;
>>>>>>>
private void exportDefinition(
final WorkflowAdapter adapter, final WorkflowDefinitionFormat format, final OutputStream os)
throws WorkflowException {
adapter.exportDefinition(format, os);
}
private WorkflowDefinitionFormat getFormat(final MediaType format) {
return format.equals(MediaType.APPLICATION_JSON_TYPE)
? WorkflowDefinitionFormat.JSON
: WorkflowDefinitionFormat.XML;
<<<<<<<
private List<String> getDefinedTasks(final WorkflowAdapter adapter) {
List<String> definedTasks = adapter.getDefinedTasks();
auditManager.audit(Category.workflow, WorkflowSubCategory.getDefinedTasks, Result.success,
"Successfully got the list of defined workflow tasks: " + definedTasks.size());
return definedTasks;
}
@PreAuthorize("hasRole('WORKFLOW_TASK_LIST')")
public List<String> getDefinedUserTasks() {
return getDefinedTasks(uwfAdapter);
}
@PreAuthorize("hasRole('WORKFLOW_TASK_LIST')")
public List<String> getDefinedRoleTasks() {
return getDefinedTasks(rwfAdapter);
=======
/**
* {@inheritDoc}
*/
@Override
protected WorkflowDefinitionTO resolveReference(final Method method, final Object... args)
throws UnresolvedReferenceException {
throw new UnresolvedReferenceException();
>>>>>>>
/**
* {@inheritDoc}
*/
@Override
protected AbstractBaseBean resolveReference(final Method method, final Object... args)
throws UnresolvedReferenceException {
throw new UnresolvedReferenceException(); |
<<<<<<<
=======
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.syncope.common.SyncopeConstants;
>>>>>>>
import org.apache.commons.lang3.ArrayUtils;
<<<<<<<
@Component
public class ConfigurationController extends AbstractController {
@Autowired
private AuditManager auditManager;
=======
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;
@Controller
@RequestMapping("/configuration")
public class ConfigurationController extends AbstractTransactionalController<ConfigurationTO> {
>>>>>>>
@Component
public class ConfigurationController extends AbstractTransactionalController<ConfigurationTO> {
<<<<<<<
auditManager.audit(Category.configuration, ConfigurationSubCategory.create, Result.success,
"Successfully created conf: " + conf.getKey());
=======
response.setStatus(HttpServletResponse.SC_CREATED);
>>>>>>>
<<<<<<<
public Set<String> getValidators() {
Set<String> validators = classNamesLoader.getClassNames(ImplementationClassNamesLoader.Type.VALIDATOR);
auditManager.audit(Category.configuration, ConfigurationSubCategory.getValidators, Result.success,
"Successfully listed all validators: " + validators.size());
return validators;
=======
@RequestMapping(method = RequestMethod.GET, value = "/validators")
public ModelAndView getValidators() {
return new ModelAndView().addObject(
classNamesLoader.getClassNames(ImplementationClassNamesLoader.Type.VALIDATOR));
>>>>>>>
public Set<String> getValidators() {
return classNamesLoader.getClassNames(ImplementationClassNamesLoader.Type.VALIDATOR);
<<<<<<<
auditManager.audit(Category.configuration, ConfigurationSubCategory.getMailTemplates, Result.success,
"Successfully listed all mail templates: " + htmlTemplates.size());
return htmlTemplates;
=======
return new ModelAndView().addObject(htmlTemplates);
}
@RequestMapping(method = RequestMethod.GET, value = "/dbexport")
public void dbExport(final HttpServletResponse response) {
response.setContentType(MediaType.TEXT_XML);
response.setHeader(SyncopeConstants.CONTENT_DISPOSITION_HEADER,
"attachment; filename=" + ContentLoader.CONTENT_XML);
try {
dbExportInternal(response.getOutputStream());
} catch (IOException e) {
LOG.error("Getting servlet output stream", e);
}
>>>>>>>
return htmlTemplates; |
<<<<<<<
=======
// GuardedString is not in classpath
private static final String GUARDED_STRING = "org.identityconnectors.common.security.GuardedString";
// GuardedByteArray is not in classpath
private static final String GUARDED_BYTE_ARRAY = "org.identityconnectors.common.security.GuardedByteArray";
private static final Class[] NUMBER = { Integer.class, Double.class, Long.class,
Float.class, Number.class, Integer.TYPE, Long.TYPE, Double.TYPE, Float.TYPE };
>>>>>>>
<<<<<<<
new Model<String>(bundleTO == null ? null : bundleTO.getLocation()));
((DropDownChoice<String>) location.getField()).setNullValid(true);
=======
new Model<String>(bundleTO == null ? null : bundleTO.getLocation()));
((DropDownChoice) location.getField()).setNullValid(true);
>>>>>>>
new Model<String>(bundleTO == null ? null : bundleTO.getLocation()));
((DropDownChoice<String>) location.getField()).setNullValid(true);
<<<<<<<
new Model<String>(bundleTO == null ? null : bundleTO.getBundleName()));
((DropDownChoice<String>) connectorName.getField()).setNullValid(true);
=======
new Model<String>(bundleTO == null ? null : bundleTO.getBundleName()));
((DropDownChoice) connectorName.getField()).setNullValid(true);
>>>>>>>
new Model<String>(bundleTO == null ? null : bundleTO.getBundleName()));
((DropDownChoice<String>) connectorName.getField()).setNullValid(true);
<<<<<<<
if (ClassUtils.isPrimitiveOrWrapper(propertySchemaClass)) {
propertySchemaClass =
org.apache.commons.lang3.ClassUtils.primitiveToWrapper(propertySchemaClass);
}
} catch (Exception e) {
LOG.error("Error parsing attribute type", e);
propertySchemaClass = String.class;
}
if (ClassUtils.isAssignable(Number.class, propertySchemaClass)) {
field = new SpinnerFieldPanel<Number>("panel", label.getDefaultModelObjectAsString(),
(Class<Number>) propertySchemaClass, new Model<Number>(), null, null);
required = property.getSchema().isRequired();
} else if (ClassUtils.isAssignable(Boolean.class, propertySchemaClass)) {
field = new AjaxCheckBoxPanel("panel",
label.getDefaultModelObjectAsString(), new Model<Boolean>());
} else {
field = new AjaxTextFieldPanel("panel",
label.getDefaultModelObjectAsString(), new Model<String>());
required = property.getSchema().isRequired();
}
if (String[].class.equals(propertySchemaClass)) {
isArray = true;
}
}
field.setTitle(property.getSchema().getHelpMessage());
if (required) {
field.addRequiredLabel();
}
if (isArray) {
if (property.getValues().isEmpty()) {
property.getValues().add(null);
}
item.add(new MultiFieldPanel<String>(
"panel", new PropertyModel<List<String>>(property, "values"), field));
} else {
field.setNewModel(property.getValues());
item.add(field);
}
final AjaxCheckBoxPanel overridable = new AjaxCheckBoxPanel("connPropAttrOverridable",
"connPropAttrOverridable", new PropertyModel<Boolean>(property, "overridable"));
item.add(overridable);
connInstanceTO.getConfiguration().add(property);
=======
} catch (Exception e) {
LOG.error("Error parsing attribute type", e);
propertySchemaClass = String.class;
}
if (ArrayUtils.contains(NUMBER, propertySchemaClass)) {
field = new AjaxNumberFieldPanel("panel",
label.getDefaultModelObjectAsString(), new Model<Number>(),
ClassUtils.resolvePrimitiveIfNecessary(propertySchemaClass));
required = property.getSchema().isRequired();
} else if (Boolean.class.equals(propertySchemaClass)
|| boolean.class.equals(propertySchemaClass)) {
field = new AjaxCheckBoxPanel("panel",
label.getDefaultModelObjectAsString(), new Model<Boolean>());
} else {
field = new AjaxTextFieldPanel("panel",
label.getDefaultModelObjectAsString(), new Model<String>());
required = property.getSchema().isRequired();
}
if (propertySchemaClass.isArray()) {
isArray = true;
}
}
field.setTitle(property.getSchema().getHelpMessage());
if (required) {
field.addRequiredLabel();
}
if (isArray) {
if (property.getValues().isEmpty()) {
property.getValues().add(null);
}
item.add(new MultiValueSelectorPanel<String>(
"panel", new PropertyModel<List<String>>(property, "values"), field));
} else {
field.setNewModel(property.getValues());
item.add(field);
}
final AjaxCheckBoxPanel overridable = new AjaxCheckBoxPanel("connPropAttrOverridable",
"connPropAttrOverridable", new PropertyModel<Boolean>(property, "overridable"));
item.add(overridable);
connInstanceTO.addConfiguration(property);
>>>>>>>
if (ClassUtils.isPrimitiveOrWrapper(propertySchemaClass)) {
propertySchemaClass =
org.apache.commons.lang3.ClassUtils.primitiveToWrapper(propertySchemaClass);
}
} catch (Exception e) {
LOG.error("Error parsing attribute type", e);
propertySchemaClass = String.class;
}
if (ClassUtils.isAssignable(Number.class, propertySchemaClass)) {
field = new SpinnerFieldPanel<Number>("panel", label.getDefaultModelObjectAsString(),
(Class<Number>) propertySchemaClass, new Model<Number>(), null, null);
required = property.getSchema().isRequired();
} else if (ClassUtils.isAssignable(Boolean.class, propertySchemaClass)) {
field = new AjaxCheckBoxPanel("panel",
label.getDefaultModelObjectAsString(), new Model<Boolean>());
} else {
field = new AjaxTextFieldPanel("panel",
label.getDefaultModelObjectAsString(), new Model<String>());
required = property.getSchema().isRequired();
}
if (propertySchemaClass.isArray()) {
isArray = true;
}
}
field.setTitle(property.getSchema().getHelpMessage());
if (required) {
field.addRequiredLabel();
}
if (isArray) {
if (property.getValues().isEmpty()) {
property.getValues().add(null);
}
item.add(new MultiFieldPanel<String>(
"panel", new PropertyModel<List<String>>(property, "values"), field));
} else {
field.setNewModel(property.getValues());
item.add(field);
}
final AjaxCheckBoxPanel overridable = new AjaxCheckBoxPanel("connPropAttrOverridable",
"connPropAttrOverridable", new PropertyModel<Boolean>(property, "overridable"));
item.add(overridable);
connInstanceTO.getConfiguration().add(property);
<<<<<<<
final ConnConfProperty propertyTO = new ConnConfProperty();
propertyTO.setSchema(key);
if (connInstanceTO.getId() != 0
&& connInstanceTO.getConfigurationMap().containsKey(key.getName())
&& connInstanceTO.getConfigurationMap().get(key.getName()).getValues() != null) {
propertyTO.getValues().addAll(connInstanceTO.getConfigurationMap().get(key.getName()).getValues());
propertyTO.setOverridable(connInstanceTO.getConfigurationMap().get(key.getName()).isOverridable());
=======
final ConnConfProperty property = new ConnConfProperty();
property.setSchema(key);
if (connInstanceTO.getId() != 0 && connInstanceTO.getConfigurationMap().containsKey(key.getName())) {
property.getValues().addAll(
connInstanceTO.getConfigurationMap().get(key.getName()).getValues());
property.setOverridable(connInstanceTO.getConfigurationMap().get(key.getName()).isOverridable());
>>>>>>>
final ConnConfProperty property = new ConnConfProperty();
property.setSchema(key);
if (connInstanceTO.getId() != 0
&& connInstanceTO.getConfigurationMap().containsKey(key.getName())
&& connInstanceTO.getConfigurationMap().get(key.getName()).getValues() != null) {
property.getValues().addAll(connInstanceTO.getConfigurationMap().get(key.getName()).getValues());
property.setOverridable(connInstanceTO.getConfigurationMap().get(key.getName()).isOverridable()); |
<<<<<<<
protected AnyTO doCreate(final AnyTO anyTO, final SyncDelta delta, final ProvisioningResult result) {
UserTO userTO = UserTO.class.cast(anyTO);
=======
protected AbstractSubjectMod getSubjectMod(final AbstractSubjectTO subjectTO, final SyncDelta delta) {
return connObjectUtils.getAttributableMod(subjectTO.getKey(),
delta.getObject(),
subjectTO,
profile.getTask(),
getAttributableUtils());
}
@Override
protected AbstractSubjectTO doCreate(
final AbstractSubjectTO subjectTO, final SyncDelta delta, final ProvisioningResult result) {
UserTO userTO = UserTO.class.cast(subjectTO);
>>>>>>>
protected AnyTO doCreate(final AnyTO anyTO, final SyncDelta delta, final ProvisioningResult result) {
UserTO userTO = UserTO.class.cast(anyTO);
<<<<<<<
final UserMod userMod = UserMod.class.cast(anyMod);
final Boolean enabled = syncUtilities.readEnabled(delta.getObject(), profile.getTask());
=======
UserMod userMod = UserMod.class.cast(subjectMod);
Boolean enabled = syncUtilities.readEnabled(delta.getObject(), profile.getTask());
>>>>>>>
UserMod userMod = UserMod.class.cast(anyMod);
Boolean enabled = syncUtilities.readEnabled(delta.getObject(), profile.getTask()); |
<<<<<<<
import java.util.Hashtable;
=======
import java.util.ArrayList;
>>>>>>>
<<<<<<<
import java.util.Map;
import javax.naming.Context;
import javax.naming.directory.InitialDirContext;
import javax.ws.rs.core.Response;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.syncope.client.SyncopeClient;
=======
>>>>>>>
import javax.ws.rs.core.Response;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.syncope.client.SyncopeClient;
<<<<<<<
import org.apache.syncope.common.to.ResourceNameTO;
import org.apache.syncope.common.to.ResourceTO;
=======
>>>>>>>
import org.apache.syncope.common.to.ResourceNameTO;
<<<<<<<
import org.apache.syncope.common.types.ClientExceptionType;
import org.apache.syncope.common.types.ConnConfProperty;
import org.apache.syncope.common.types.Preference;
import org.apache.syncope.common.types.RESTHeaders;
import org.apache.syncope.common.types.ResourceAssociationActionType;
import org.apache.syncope.common.types.SchemaType;
import org.apache.syncope.common.util.CollectionWrapper;
=======
import org.apache.syncope.common.types.SyncopeClientExceptionType;
import org.apache.syncope.common.validation.SyncopeClientCompositeErrorException;
>>>>>>>
import org.apache.syncope.common.types.ClientExceptionType;
import org.apache.syncope.common.types.Preference;
import org.apache.syncope.common.types.RESTHeaders;
import org.apache.syncope.common.types.ResourceAssociationActionType;
import org.apache.syncope.common.types.SchemaType;
import org.apache.syncope.common.util.CollectionWrapper;
<<<<<<<
} catch (SyncopeClientException scce) {
// ignore
=======
} catch (SyncopeClientCompositeErrorException scce) {
assertNotNull(scce);
>>>>>>>
} catch (SyncopeClientException scce) {
assertNotNull(scce);
<<<<<<<
} catch (SyncopeClientException scce) {
// ignore
=======
} catch (SyncopeClientCompositeErrorException scce) {
assertNotNull(scce);
>>>>>>>
} catch (SyncopeClientException scce) {
assertNotNull(scce); |
<<<<<<<
public ResourceTO create(final ResourceTO resourceTO) {
LOG.debug("Resource creation: {}", resourceTO);
=======
@RequestMapping(method = RequestMethod.POST, value = "/create")
public ResourceTO create(final HttpServletResponse response, @RequestBody final ResourceTO resourceTO) {
>>>>>>>
public ResourceTO create(final ResourceTO resourceTO) {
<<<<<<<
public ResourceTO update(final ResourceTO resourceTO) {
LOG.debug("Role update request: {}", resourceTO);
=======
@RequestMapping(method = RequestMethod.POST, value = "/update")
public ResourceTO update(@RequestBody final ResourceTO resourceTO) {
>>>>>>>
public ResourceTO update(final ResourceTO resourceTO) {
<<<<<<<
public BulkActionRes bulk(final BulkAction bulkAction) {
LOG.debug("Bulk '{}' called on '{}'", bulkAction.getOperation(), bulkAction.getTargets());
=======
@RequestMapping(method = RequestMethod.POST, value = "/bulk")
public BulkActionRes bulkAction(@RequestBody final BulkAction bulkAction) {
>>>>>>>
public BulkActionRes bulk(final BulkAction bulkAction) { |
<<<<<<<
=======
import java.lang.reflect.Method;
import org.apache.syncope.common.AbstractBaseBean;
import org.apache.syncope.common.to.PropagationTaskTO;
import org.apache.syncope.common.to.SchedTaskTO;
import org.apache.syncope.common.to.SyncTaskTO;
import org.apache.syncope.common.to.TaskTO;
import org.apache.syncope.core.persistence.beans.NotificationTask;
import org.apache.syncope.core.persistence.beans.PropagationTask;
import org.apache.syncope.core.persistence.beans.SchedTask;
import org.apache.syncope.core.persistence.beans.SyncTask;
import org.apache.syncope.core.persistence.beans.Task;
import org.apache.syncope.core.util.AttributableUtil;
import org.apache.syncope.core.util.TaskUtil;
>>>>>>>
import java.lang.reflect.Method;
import org.apache.syncope.common.AbstractBaseBean;
<<<<<<<
@Transactional(rollbackFor = {Throwable.class})
abstract class AbstractController {
=======
public abstract class AbstractController<T extends AbstractBaseBean> {
>>>>>>>
public abstract class AbstractController<T extends AbstractBaseBean> {
<<<<<<<
=======
protected AttributableUtil getAttributableUtil(final String kind) {
AttributableUtil result = null;
try {
result = AttributableUtil.valueOf(kind.toUpperCase());
} catch (Exception e) {
LOG.error("Attributable not supported: " + kind);
throw new TypeMismatchException(kind, AttributableUtil.class, e);
}
return result;
}
protected TaskUtil getTaskUtil(final String kind) {
TaskUtil result = null;
try {
result = TaskUtil.valueOf(kind.toUpperCase());
} catch (Exception e) {
LOG.error("Task not supported: " + kind);
throw new TypeMismatchException(kind, TaskUtil.class, e);
}
return result;
}
protected TaskUtil getTaskUtil(final Task task) {
TaskUtil result = (task instanceof PropagationTask)
? TaskUtil.PROPAGATION
: (task instanceof NotificationTask)
? TaskUtil.NOTIFICATION
: (task instanceof SyncTask)
? TaskUtil.SYNC
: (task instanceof SchedTask)
? TaskUtil.SCHED
: null;
if (result == null) {
LOG.error("Task not supported: " + task.getClass().getName());
throw new TypeMismatchException(task.getClass().getName(), TaskUtil.class);
}
return result;
}
protected TaskUtil getTaskUtil(final TaskTO taskTO) {
TaskUtil result = (taskTO instanceof PropagationTaskTO)
? TaskUtil.PROPAGATION
: (taskTO instanceof SyncTaskTO)
? TaskUtil.SYNC
: (taskTO instanceof SchedTaskTO)
? TaskUtil.SCHED
: null;
if (result == null) {
LOG.error("Task not supported: " + taskTO.getClass().getName());
throw new TypeMismatchException(taskTO.getClass().getName(), TaskUtil.class);
}
return result;
}
/**
* Resolves the stored bean (if existing) referred to by the given CUD method.
* <br />
* Read-only methods will be unresolved for performance reasons.
*
* @param method method.
* @param args method arguments.
* @return referred stored bean.
* @throws UnresolvedReferenceException in case of failures, read-only methods, or an unresolved bean.
*/
public T resolveBeanReference(final Method method, final Object... args) throws UnresolvedReferenceException {
final Transactional transactional = method.getAnnotation(Transactional.class);
if (transactional != null && transactional.readOnly()) {
throw new UnresolvedReferenceException();
}
return resolveReference(method, args);
}
protected abstract T resolveReference(Method method, Object... args) throws UnresolvedReferenceException;
>>>>>>>
/**
* Resolves the stored bean (if existing) referred to by the given CUD method.
* <br />
* Read-only methods will be unresolved for performance reasons.
*
* @param method method.
* @param args method arguments.
* @return referred stored bean.
* @throws UnresolvedReferenceException in case of failures, read-only methods, or an unresolved bean.
*/
public T resolveBeanReference(final Method method, final Object... args) throws UnresolvedReferenceException {
final Transactional transactional = method.getAnnotation(Transactional.class);
if (transactional != null && transactional.readOnly()) {
throw new UnresolvedReferenceException();
}
return resolveReference(method, args);
}
protected abstract T resolveReference(Method method, Object... args) throws UnresolvedReferenceException; |
<<<<<<<
=======
output = actual;
resultStatus = Result.SUCCESS;
>>>>>>>
output = actual;
resultStatus = Result.SUCCESS;
<<<<<<<
notificationManager.createTasks(updated.getResult().getKey().getId(), updated.getPerformedTasks());
userTO = userDataBinder.getUserTO(updated.getResult().getKey().getId());
=======
final UserTO after = userDataBinder.getUserTO(updated.getResult().getKey());
>>>>>>>
final UserTO after = userDataBinder.getUserTO(updated.getResult().getKey().getId()); |
<<<<<<<
=======
@RequestMapping(method = RequestMethod.POST, value = "/search/count")
public ModelAndView searchCount(@RequestBody final NodeCond searchCondition)
throws InvalidSearchConditionException {
return new ModelAndView().addObject(searchCountInternal(searchCondition));
}
>>>>>>>
<<<<<<<
@PreAuthorize("isAuthenticated() "
+ "and not(hasRole(T(org.apache.syncope.common.SyncopeConstants).ANONYMOUS_ENTITLEMENT))")
=======
@PreAuthorize("isAuthenticated()")
@RequestMapping(method = RequestMethod.GET, value = "/read/self")
>>>>>>>
@PreAuthorize("isAuthenticated() "
+ "and not(hasRole(T(org.apache.syncope.common.SyncopeConstants).ANONYMOUS_ENTITLEMENT))")
<<<<<<<
=======
@RequestMapping(method = RequestMethod.POST, value = "/create")
public UserTO create(final HttpServletResponse response, @RequestBody final UserTO userTO) {
UserTO savedTO = createInternal(userTO);
response.setStatus(HttpServletResponse.SC_CREATED);
return savedTO;
}
>>>>>>>
<<<<<<<
=======
// Check that this operation is allowed to be performed by caller
>>>>>>>
<<<<<<<
=======
>>>>>>>
<<<<<<<
WorkflowResult<Map.Entry<Long, Boolean>> created = uwfAdapter.create(userTO);
=======
// Attributable transformation (if configured)
UserTO actual = attrTransformer.transform(userTO);
LOG.debug("Transformed: {}", actual);
/*
* Actual operations: workflow, propagation, notification
*/
WorkflowResult<Map.Entry<Long, Boolean>> created = uwfAdapter.create(actual);
>>>>>>>
// Attributable transformation (if configured)
UserTO actual = attrTransformer.transform(userTO);
LOG.debug("Transformed: {}", actual);
/*
* Actual operations: workflow, propagation, notification
*/
WorkflowResult<Map.Entry<Long, Boolean>> created = uwfAdapter.create(actual);
<<<<<<<
created, userTO.getPassword(), userTO.getVirAttrs());
final List<PropagationStatusTO> propagations = new ArrayList<PropagationStatusTO>();
final DefaultPropagationHandler propHanlder = new DefaultPropagationHandler(connObjectUtil, propagations);
=======
created, actual.getPassword(), actual.getVirtualAttributes());
PropagationReporter propagationReporter =
ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
>>>>>>>
created, actual.getPassword(), actual.getVirAttrs());
PropagationReporter propagationReporter =
ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
<<<<<<<
savedTO.getPropagationStatusTOs().addAll(propagations);
=======
savedTO.setPropagationStatusTOs(propagationReporter.getStatuses());
>>>>>>>
savedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
<<<<<<<
final String changedPwd = userMod.getPassword();
=======
// AttributableMod transformation (if configured)
UserMod actual = attrTransformer.transform(userMod);
LOG.debug("Transformed: {}", actual);
final String changedPwd = actual.getPassword();
>>>>>>>
final String changedPwd = userMod.getPassword();
// AttributableMod transformation (if configured)
UserMod actual = attrTransformer.transform(userMod);
LOG.debug("Transformed: {}", actual);
<<<<<<<
userMod.getVirAttrsToRemove(), userMod.getVirAttrsToUpdate());
=======
actual.getVirtualAttributesToBeRemoved(), actual.getVirtualAttributesToBeUpdated());
>>>>>>>
actual.getVirAttrsToRemove(), actual.getVirAttrsToUpdate());
<<<<<<<
Set<String> pwdResourceNames = userMod.getPwdPropRequest().getResources();
=======
Set<String> pwdResourceNames = actual.getPwdPropRequest().getResources();
>>>>>>>
Set<String> pwdResourceNames = actual.getPwdPropRequest().getResources();
<<<<<<<
toBeExcluded.addAll(userMod.getResourcesToAdd());
=======
toBeExcluded.addAll(actual.getResourcesToBeAdded());
>>>>>>>
toBeExcluded.addAll(actual.getResourcesToAdd());
<<<<<<<
userMod.getVirAttrsToRemove(),
userMod.getVirAttrsToUpdate(),
=======
actual.getVirtualAttributesToBeRemoved(),
actual.getVirtualAttributesToBeUpdated(),
>>>>>>>
actual.getVirAttrsToRemove(),
actual.getVirAttrsToUpdate(),
<<<<<<<
userMod.getVirAttrsToRemove(),
userMod.getVirAttrsToUpdate(),
=======
actual.getVirtualAttributesToBeRemoved(),
actual.getVirtualAttributesToBeUpdated(),
>>>>>>>
actual.getVirAttrsToRemove(),
actual.getVirAttrsToUpdate(),
<<<<<<<
final List<PropagationStatusTO> propagations = new ArrayList<PropagationStatusTO>();
final DefaultPropagationHandler propHanlder = new DefaultPropagationHandler(connObjectUtil, propagations);
=======
PropagationReporter propagationReporter =
ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
>>>>>>>
PropagationReporter propagationReporter =
ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
<<<<<<<
public UserTO activate(final Long userId, final String token, final PropagationRequestTO propagationRequestTO) {
=======
public UserTO activate(@PathVariable("userId") final Long userId,
@RequestParam(required = true) final String token,
@RequestBody final PropagationRequestTO propagationRequestTO) {
>>>>>>>
public UserTO activate(final Long userId, final String token, final PropagationRequestTO propagationRequestTO) {
<<<<<<<
public UserTO activate(final String username, final String token) {
=======
public UserTO activate(@PathVariable("username") final String username,
@RequestParam(required = true) final String token) {
>>>>>>>
public UserTO activate(final String username, final String token) {
<<<<<<<
public UserTO activate(final String username, final String token, final PropagationRequestTO propagationRequestTO) {
=======
public UserTO activate(@PathVariable("username") final String username,
@RequestParam(required = true) final String token,
@RequestBody final PropagationRequestTO propagationRequestTO) {
>>>>>>>
public UserTO activate(final String username, final String token, final PropagationRequestTO propagationRequestTO) {
<<<<<<<
public UserTO suspend(final Long userId) {
=======
public UserTO suspend(@PathVariable("userId") final Long userId) {
>>>>>>>
public UserTO suspend(final Long userId) {
<<<<<<<
public UserTO suspend(final Long userId, final PropagationRequestTO propagationRequestTO) {
=======
public UserTO suspend(@PathVariable("userId") final Long userId,
@RequestBody final PropagationRequestTO propagationRequestTO) {
>>>>>>>
public UserTO suspend(final Long userId, final PropagationRequestTO propagationRequestTO) {
<<<<<<<
public UserTO suspend(final String username) {
=======
public UserTO suspend(@PathVariable("username") final String username) {
>>>>>>>
public UserTO suspend(final String username) {
<<<<<<<
public UserTO suspend(final String username, final PropagationRequestTO propagationRequestTO) {
=======
public UserTO suspend(@PathVariable("username") final String username,
@RequestBody final PropagationRequestTO propagationRequestTO) {
>>>>>>>
public UserTO suspend(final String username, final PropagationRequestTO propagationRequestTO) {
<<<<<<<
public UserTO reactivate(final Long userId) {
=======
public UserTO reactivate(@PathVariable("userId") final Long userId) {
>>>>>>>
public UserTO reactivate(final Long userId) {
<<<<<<<
public UserTO reactivate(final Long userId, final PropagationRequestTO propagationRequestTO) {
=======
public UserTO reactivate(@PathVariable("userId") final Long userId,
@RequestBody final PropagationRequestTO propagationRequestTO) {
>>>>>>>
public UserTO reactivate(final Long userId, final PropagationRequestTO propagationRequestTO) {
<<<<<<<
=======
@RequestMapping(method = RequestMethod.GET, value = "/reactivateByUsername/{username}")
>>>>>>>
<<<<<<<
public UserTO reactivate(final String username, final PropagationRequestTO propagationRequestTO) {
=======
public UserTO reactivate(@PathVariable("username") final String username,
@RequestBody final PropagationRequestTO propagationRequestTO) {
>>>>>>>
public UserTO reactivate(final String username, final PropagationRequestTO propagationRequestTO) {
<<<<<<<
=======
@PreAuthorize("hasRole('USER_UPDATE')")
@RequestMapping(method = RequestMethod.POST, value = "/execute/workflow/{taskId}")
public UserTO executeWorkflow(@RequestBody final UserTO userTO, @PathVariable("taskId") final String taskId) {
LOG.debug("About to execute {} on {}", taskId, userTO.getId());
WorkflowResult<Long> updated = uwfAdapter.execute(userTO, taskId);
List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(
new WorkflowResult<Map.Entry<Long, Boolean>>(new SimpleEntry<Long, Boolean>(updated.getResult(), null),
updated.getPropByRes(), updated.getPerformedTasks()));
taskExecutor.execute(tasks);
notificationManager.createTasks(updated.getResult(), updated.getPerformedTasks());
final UserTO savedTO = binder.getUserTO(updated.getResult());
LOG.debug("About to return updated user\n{}", savedTO);
auditManager.audit(Category.user, UserSubCategory.executeWorkflow, Result.success,
"Successfully executed workflow action " + taskId + " on user: " + userTO.getUsername());
return savedTO;
}
@PreAuthorize("hasRole('WORKFLOW_FORM_LIST')")
@RequestMapping(method = RequestMethod.GET, value = "/workflow/form/list")
@Transactional(rollbackFor = {Throwable.class})
public List<WorkflowFormTO> getForms() {
List<WorkflowFormTO> forms = uwfAdapter.getForms();
auditManager.audit(Category.user, UserSubCategory.getForms, Result.success,
"Successfully list workflow forms: " + forms.size());
return forms;
}
@PreAuthorize("hasRole('WORKFLOW_FORM_READ') and hasRole('USER_READ')")
@RequestMapping(method = RequestMethod.GET, value = "/workflow/form/{userId}")
@Transactional(rollbackFor = {Throwable.class})
public WorkflowFormTO getFormForUser(@PathVariable("userId") final Long userId) {
SyncopeUser user = binder.getUserFromId(userId);
WorkflowFormTO result = uwfAdapter.getForm(user.getWorkflowId());
auditManager.audit(Category.user, UserSubCategory.getFormForUser, Result.success,
"Successfully read workflow form for user: " + user.getUsername());
return result;
}
@PreAuthorize("hasRole('WORKFLOW_FORM_CLAIM')")
@RequestMapping(method = RequestMethod.GET, value = "/workflow/form/claim/{taskId}")
@Transactional(rollbackFor = {Throwable.class})
public WorkflowFormTO claimForm(@PathVariable("taskId") final String taskId) {
WorkflowFormTO result = uwfAdapter.claimForm(taskId,
SecurityContextHolder.getContext().getAuthentication().getName());
auditManager.audit(Category.user, UserSubCategory.claimForm, Result.success,
"Successfully claimed workflow form: " + taskId);
return result;
}
@PreAuthorize("hasRole('WORKFLOW_FORM_SUBMIT')")
@RequestMapping(method = RequestMethod.POST, value = "/workflow/form/submit")
@Transactional(rollbackFor = {Throwable.class})
public UserTO submitForm(@RequestBody final WorkflowFormTO form) {
LOG.debug("About to process form {}", form);
WorkflowResult<Map.Entry<Long, String>> updated = uwfAdapter.submitForm(form,
SecurityContextHolder.getContext().getAuthentication().getName());
// propByRes can be made empty by the workflow definition if no propagation should occur
// (for example, with rejected users)
if (updated.getPropByRes() != null && !updated.getPropByRes().isEmpty()) {
List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(
new WorkflowResult<Map.Entry<Long, Boolean>>(
new SimpleEntry<Long, Boolean>(updated.getResult().getKey(), Boolean.TRUE),
updated.getPropByRes(),
updated.getPerformedTasks()),
updated.getResult().getValue(),
null,
null,
null);
taskExecutor.execute(tasks);
}
final UserTO savedTO = binder.getUserTO(updated.getResult().getKey());
auditManager.audit(Category.user, UserSubCategory.submitForm, Result.success,
"Successfully submitted workflow form for user: " + savedTO.getUsername());
LOG.debug("About to return user after form processing\n{}", savedTO);
return savedTO;
}
>>>>>>>
<<<<<<<
LOG.debug("About to set 'enabled:{}' status to {}", user, status);
=======
LOG.debug("About to set status of {}" + user);
>>>>>>>
LOG.debug("About to set 'enabled:{}' status to {}", user, status);
<<<<<<<
SyncopeClientCompositeException sccee =
new SyncopeClientCompositeException(Response.Status.BAD_REQUEST.getStatusCode());
=======
SyncopeClientCompositeErrorException sccee =
new SyncopeClientCompositeErrorException(HttpStatus.BAD_REQUEST);
>>>>>>>
SyncopeClientCompositeException sccee =
new SyncopeClientCompositeException(Response.Status.BAD_REQUEST.getStatusCode()); |
<<<<<<<
import org.apache.syncope.core.util.JexlUtil;
=======
import org.apache.syncope.core.util.jexl.JexlUtil;
import org.springframework.beans.BeanUtils;
>>>>>>>
import org.apache.syncope.core.util.jexl.JexlUtil; |
<<<<<<<
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public int count() {
=======
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public int countInternal() {
>>>>>>>
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public int count() {
<<<<<<<
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public int searchCount(final NodeCond searchCondition) throws InvalidSearchConditionException {
=======
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public int searchCountInternal(final NodeCond searchCondition) throws InvalidSearchConditionException {
>>>>>>>
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public int searchCount(final NodeCond searchCondition) throws InvalidSearchConditionException {
<<<<<<<
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
=======
@RequestMapping(method = RequestMethod.GET, value = "/list")
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
>>>>>>>
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
<<<<<<<
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public List<UserTO> list(final int page, final int size) {
=======
@RequestMapping(method = RequestMethod.GET, value = "/list/{page}/{size}")
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<UserTO> list(@PathVariable("page") final int page, @PathVariable("size") final int size) {
>>>>>>>
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<UserTO> list(final int page, final int size) {
<<<<<<<
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public List<UserTO> search(final NodeCond searchCondition, final int page, final int size)
=======
@RequestMapping(method = RequestMethod.POST, value = "/search/{page}/{size}")
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<UserTO> search(@RequestBody final NodeCond searchCondition, @PathVariable("page") final int page,
@PathVariable("size") final int size)
>>>>>>>
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<UserTO> search(final NodeCond searchCondition, final int page, final int size)
<<<<<<<
public UserTO create(final UserTO userTO) {
LOG.debug("User create called with {}", userTO);
=======
public UserTO createInternal(final UserTO userTO) {
// Check that this operation is allowed to be performed by caller
>>>>>>>
public UserTO create(final UserTO userTO) {
<<<<<<<
savedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
LOG.debug("About to return created user\n{}", savedTO);
=======
savedTO.setPropagationStatusTOs(propagationReporter.getStatuses());
>>>>>>>
savedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
<<<<<<<
public UserTO update(final UserMod userMod) {
LOG.debug("User update called with {}", userMod);
=======
@RequestMapping(method = RequestMethod.POST, value = "/update")
public UserTO update(@RequestBody final UserMod userMod) {
>>>>>>>
public UserTO update(final UserMod userMod) {
<<<<<<<
final UserTO updatedTO = binder.getUserTO(updated.getResult().getKey().getId());
updatedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
LOG.debug("About to return updated user\n{}", updatedTO);
=======
// 4. prepare result, including propagation status on external resources
final UserTO updatedTO = binder.getUserTO(updated.getResult().getKey());
updatedTO.setPropagationStatusTOs(propagationReporter.getStatuses());
>>>>>>>
final UserTO updatedTO = binder.getUserTO(updated.getResult().getKey().getId());
updatedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
<<<<<<<
@Transactional(rollbackFor = {Throwable.class})
public UserTO status(final StatusMod statusMod) {
LOG.debug("About to mod status {}", statusMod);
SyncopeUser user = binder.getUserFromId(statusMod.getId());
=======
@RequestMapping(method = RequestMethod.POST, value = "/execute/workflow/{taskId}")
public UserTO executeWorkflow(@RequestBody final UserTO userTO, @PathVariable("taskId") final String taskId) {
WorkflowResult<Long> updated = uwfAdapter.execute(userTO, taskId);
List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(
new WorkflowResult<Map.Entry<Long, Boolean>>(new SimpleEntry<Long, Boolean>(updated.getResult(), null),
updated.getPropByRes(), updated.getPerformedTasks()));
taskExecutor.execute(tasks);
return binder.getUserTO(updated.getResult());
}
@PreAuthorize("hasRole('WORKFLOW_FORM_LIST')")
@RequestMapping(method = RequestMethod.GET, value = "/workflow/form/list")
@Transactional(rollbackFor = { Throwable.class })
public List<WorkflowFormTO> getForms() {
return uwfAdapter.getForms();
}
@PreAuthorize("hasRole('WORKFLOW_FORM_READ') and hasRole('USER_READ')")
@RequestMapping(method = RequestMethod.GET, value = "/workflow/form/{userId}")
@Transactional(rollbackFor = { Throwable.class })
public WorkflowFormTO getFormForUser(@PathVariable("userId") final Long userId) {
SyncopeUser user = binder.getUserFromId(userId);
return uwfAdapter.getForm(user.getWorkflowId());
}
@PreAuthorize("hasRole('WORKFLOW_FORM_CLAIM')")
@RequestMapping(method = RequestMethod.GET, value = "/workflow/form/claim/{taskId}")
@Transactional(rollbackFor = { Throwable.class })
public WorkflowFormTO claimForm(@PathVariable("taskId") final String taskId) {
return uwfAdapter.claimForm(taskId, SecurityContextHolder.getContext().getAuthentication().getName());
}
@PreAuthorize("hasRole('WORKFLOW_FORM_SUBMIT')")
@RequestMapping(method = RequestMethod.POST, value = "/workflow/form/submit")
@Transactional(rollbackFor = { Throwable.class })
public UserTO submitForm(@RequestBody final WorkflowFormTO form) {
WorkflowResult<Map.Entry<Long, String>> updated = uwfAdapter.submitForm(form,
SecurityContextHolder.getContext().getAuthentication().getName());
// propByRes can be made empty by the workflow definition if no propagation should occur
// (for example, with rejected users)
if (updated.getPropByRes() != null && !updated.getPropByRes().isEmpty()) {
List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(
new WorkflowResult<Map.Entry<Long, Boolean>>(
new SimpleEntry<Long, Boolean>(updated.getResult().getKey(), Boolean.TRUE),
updated.getPropByRes(),
updated.getPerformedTasks()),
updated.getResult().getValue(),
null,
null,
null);
taskExecutor.execute(tasks);
}
return binder.getUserTO(updated.getResult().getKey());
}
protected UserTO setStatus(final SyncopeUser user, final String token,
final PropagationRequestTO propagationRequestTO, final boolean status, final String task) {
>>>>>>>
@Transactional(rollbackFor = { Throwable.class })
public UserTO status(final StatusMod statusMod) {
SyncopeUser user = binder.getUserFromId(statusMod.getId());
<<<<<<<
savedTO.getPropagationStatusTOs().addAll(propReporter.getStatuses());
LOG.debug("About to return updated user\n{}", savedTO);
=======
savedTO.setPropagationStatusTOs(propReporter.getStatuses());
>>>>>>>
savedTO.getPropagationStatusTOs().addAll(propReporter.getStatuses());
<<<<<<<
final UserTO deletedTO;
SyncopeUser deleted = userDAO.find(userId);
if (deleted == null) {
deletedTO = new UserTO();
deletedTO.setId(userId);
} else {
deletedTO = binder.getUserTO(userId);
}
deletedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
LOG.debug("User successfully deleted: {}", userId);
return deletedTO;
=======
return userTO;
>>>>>>>
final UserTO deletedTO;
SyncopeUser deleted = userDAO.find(userId);
if (deleted == null) {
deletedTO = new UserTO();
deletedTO.setId(userId);
} else {
deletedTO = binder.getUserTO(userId);
}
deletedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
return deletedTO;
<<<<<<<
public BulkActionRes bulk(final BulkAction bulkAction) {
LOG.debug("Bulk '{}' called on '{}'", bulkAction.getOperation(), bulkAction.getTargets());
=======
@RequestMapping(method = RequestMethod.POST, value = "/bulk")
public BulkActionRes bulkAction(@RequestBody final BulkAction bulkAction) {
>>>>>>>
public BulkActionRes bulk(final BulkAction bulkAction) { |
<<<<<<<
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.syncope.common.SyncopeConstants;
=======
import org.apache.syncope.console.commons.PageUtils;
>>>>>>>
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.syncope.common.SyncopeConstants;
import org.apache.syncope.console.commons.PageUtils; |
<<<<<<<
import org.apache.commons.lang3.ArrayUtils;
import org.apache.syncope.common.types.AuditElements.AuthenticationSubCategory;
import org.apache.syncope.common.types.AuditElements.Category;
=======
import org.apache.commons.lang.ArrayUtils;
import org.apache.syncope.common.types.AuditElements;
>>>>>>>
import org.apache.commons.lang3.ArrayUtils;
import org.apache.syncope.common.types.AuditElements; |
<<<<<<<
=======
private static final TokenSet commandSet = TokenSet.create(GENERIC_COMMAND_ELEMENT, SIMPLE_COMMAND_ELEMENT);
private static final TokenSet subshellSet = TokenSet.create(SUBSHELL_COMMAND, ARITHMETIC_COMMAND, PARAM_EXPANSION_ELEMENT, VAR_COMPOSED_VAR_ELEMENT);
private static final Spacing NO_SPACING_WITH_NEWLINE = Spacing.createSpacing(0, 0, 0, true, 1);
>>>>>>>
private static final TokenSet commandSet = TokenSet.create(GENERIC_COMMAND_ELEMENT, SIMPLE_COMMAND_ELEMENT);
private static final Spacing NO_SPACING_WITH_NEWLINE = Spacing.createSpacing(0, 0, 0, true, 1); |
<<<<<<<
@Component
public class RoleController extends AbstractResourceAssociator<RoleTO> {
/**
* Logger.
*/
protected static final Logger LOG = LoggerFactory.getLogger(RoleController.class);
=======
/**
* Note that this controller does not extend AbstractTransactionalController, hence does not provide any of
* Spring's Transactional logic at class level.
*
* @see AbstractTransactionalController
*/
@Controller
@RequestMapping("/role")
public class RoleController extends AbstractController<RoleTO> {
>>>>>>>
/**
* Note that this controller does not extend AbstractTransactionalController, hence does not provide any of
* Spring's Transactional logic at class level.
*
* @see AbstractTransactionalController
*/
@Component
public class RoleController extends AbstractResourceAssociator<RoleTO> {
<<<<<<<
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public List<RoleTO> search(final NodeCond searchCondition)
=======
@RequestMapping(method = RequestMethod.POST, value = "/search")
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<RoleTO> search(@RequestBody final NodeCond searchCondition)
>>>>>>>
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<RoleTO> search(final NodeCond searchCondition)
<<<<<<<
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public List<RoleTO> search(final NodeCond searchCondition, final int page, final int size)
=======
@RequestMapping(method = RequestMethod.POST, value = "/search/{page}/{size}")
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<RoleTO> search(@RequestBody final NodeCond searchCondition, @PathVariable("page") final int page,
@PathVariable("size") final int size)
>>>>>>>
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<RoleTO> search(final NodeCond searchCondition, final int page, final int size)
<<<<<<<
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public int searchCount(final NodeCond searchCondition)
=======
@RequestMapping(method = RequestMethod.POST, value = "/search/count")
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public ModelAndView searchCount(@RequestBody final NodeCond searchCondition)
>>>>>>>
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public int searchCount(final NodeCond searchCondition)
<<<<<<<
public RoleTO create(final RoleTO roleTO) {
LOG.debug("Role create called with parameters {}", roleTO);
=======
@RequestMapping(method = RequestMethod.POST, value = "/create")
public RoleTO create(final HttpServletResponse response, @RequestBody final RoleTO roleTO) {
>>>>>>>
public RoleTO create(final RoleTO roleTO) {
<<<<<<<
savedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
LOG.debug("About to return created role\n{}", savedTO);
=======
savedTO.setPropagationStatusTOs(propagationReporter.getStatuses());
>>>>>>>
savedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
<<<<<<<
public RoleTO update(final RoleMod roleMod) {
LOG.debug("Role update called with {}", roleMod);
=======
@RequestMapping(method = RequestMethod.POST, value = "/update")
public RoleTO update(@RequestBody final RoleMod roleMod) {
>>>>>>>
public RoleTO update(final RoleMod roleMod) {
<<<<<<<
updatedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
LOG.debug("About to return updated role\n{}", updatedTO);
=======
updatedTO.setPropagationStatusTOs(propagationReporter.getStatuses());
>>>>>>>
updatedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
<<<<<<<
public RoleTO delete(final Long roleId) {
LOG.debug("Role delete called for {}", roleId);
List<SyncopeRole> ownedRoles = roleDAO.findOwnedByRole(roleId);
if (!ownedRoles.isEmpty()) {
List<String> owned = new ArrayList<String>(ownedRoles.size());
for (SyncopeRole role : ownedRoles) {
owned.add(role.getId() + " " + role.getName());
}
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.RoleOwnership);
sce.getElements().addAll(owned);
throw sce;
}
=======
@RequestMapping(method = RequestMethod.GET, value = "/delete/{roleId}")
public RoleTO delete(@PathVariable("roleId") final Long roleId) {
>>>>>>>
public RoleTO delete(final Long roleId) {
List<SyncopeRole> ownedRoles = roleDAO.findOwnedByRole(roleId);
if (!ownedRoles.isEmpty()) {
List<String> owned = new ArrayList<String>(ownedRoles.size());
for (SyncopeRole role : ownedRoles) {
owned.add(role.getId() + " " + role.getName());
}
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.RoleOwnership);
sce.getElements().addAll(owned);
throw sce;
} |
<<<<<<<
AbstractNormalSchema schema = null;
=======
AbstractSchema schema = null;
boolean readOnlyVirSchema = false;
>>>>>>>
AbstractNormalSchema schema = null;
boolean readOnlyVirSchema = false;
<<<<<<<
Map.Entry<String, Attribute> result = null;
final ConfigurableApplicationContext context = ApplicationContextProvider.getApplicationContext();
=======
ConfigurableApplicationContext context = ApplicationContextProvider.getApplicationContext();
>>>>>>>
final Map.Entry<String, Attribute> result;
final ConfigurableApplicationContext context = ApplicationContextProvider.getApplicationContext();
<<<<<<<
final SchemaDAO schemaDAO = context.getBean(SchemaDAO.class);
=======
SchemaDAO schemaDAO = context.getBean(SchemaDAO.class);
>>>>>>>
final SchemaDAO schemaDAO = context.getBean(SchemaDAO.class);
<<<<<<<
JexlUtil.addFieldsToContext(subject, jexlContext);
JexlUtil.addAttrsToContext(subject.getAttrs(), jexlContext);
JexlUtil.addDerAttrsToContext(subject.getDerAttrs(), subject.getAttrs(), jexlContext);
evalAccountLink = JexlUtil.evaluate(attrUtil.getAccountLink(resource), jexlContext);
=======
JexlUtil.addFieldsToContext(subject, jexlContext);
JexlUtil.addAttrsToContext(subject.getAttributes(), jexlContext);
JexlUtil.addDerAttrsToContext(subject.getDerivedAttributes(), subject.getAttributes(), jexlContext);
evalAccountLink = JexlUtil.evaluate(attrUtil.getAccountLink(resource), jexlContext);
>>>>>>>
JexlUtil.addFieldsToContext(subject, jexlContext);
JexlUtil.addAttrsToContext(subject.getAttrs(), jexlContext);
JexlUtil.addDerAttrsToContext(subject.getDerAttrs(), subject.getAttrs(), jexlContext);
evalAccountLink = JexlUtil.evaluate(attrUtil.getAccountLink(resource), jexlContext); |
<<<<<<<
import javax.annotation.Resource;
=======
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.ArrayUtils;
>>>>>>>
import javax.annotation.Resource;
import org.apache.commons.lang3.ArrayUtils;
<<<<<<<
import org.apache.syncope.common.types.AuditElements;
import org.apache.syncope.common.types.AuditElements.Category;
import org.apache.syncope.common.types.AuditElements.Result;
import org.apache.syncope.common.types.AuditElements.RoleSubCategory;
import org.apache.syncope.common.types.ClientExceptionType;
import org.apache.syncope.common.validation.SyncopeClientException;
import org.apache.syncope.core.audit.AuditManager;
=======
>>>>>>>
import org.apache.syncope.common.types.ClientExceptionType;
import org.apache.syncope.common.validation.SyncopeClientException;
<<<<<<<
@Component
public class RoleController extends AbstractResourceAssociator<RoleTO> {
/**
* Logger.
*/
protected static final Logger LOG = LoggerFactory.getLogger(RoleController.class);
@Autowired
protected AuditManager auditManager;
=======
@Controller
@RequestMapping("/role")
public class RoleController extends AbstractController<RoleTO> {
>>>>>>>
@Component
public class RoleController extends AbstractResourceAssociator<RoleTO> {
/**
* Logger.
*/
protected static final Logger LOG = LoggerFactory.getLogger(RoleController.class);
<<<<<<<
auditManager.audit(Category.role, RoleSubCategory.create, Result.success,
"Successfully created role: " + savedTO.getId());
=======
response.setStatus(HttpServletResponse.SC_CREATED);
>>>>>>>
<<<<<<<
@PreAuthorize("hasRole('ROLE_UPDATE')")
@Transactional(rollbackFor = { Throwable.class })
@Override
public RoleTO unlink(final Long roleId, final Collection<String> resources) {
LOG.debug("About to unlink role({}) and resources {}", roleId, resources);
final RoleMod roleMod = new RoleMod();
roleMod.setId(roleId);
roleMod.getResourcesToRemove().addAll(resources);
final WorkflowResult<Long> updated = rwfAdapter.update(roleMod);
final RoleTO updatedTO = binder.getRoleTO(updated.getResult());
auditManager.audit(Category.user, AuditElements.RoleSubCategory.update, Result.success,
"Successfully updated role: " + updatedTO.getName());
LOG.debug("About to return updated role\n{}", updatedTO);
return updatedTO;
}
@PreAuthorize("hasRole('ROLE_UPDATE')")
@Transactional(rollbackFor = { Throwable.class })
@Override
public RoleTO unassign(final Long roleId, final Collection<String> resources) {
LOG.debug("About to unassign role({}) and resources {}", roleId, resources);
final RoleMod roleMod = new RoleMod();
roleMod.setId(roleId);
roleMod.getResourcesToRemove().addAll(resources);
return update(roleMod);
}
@PreAuthorize("hasRole('ROLE_UPDATE')")
@Transactional(rollbackFor = { Throwable.class })
@Override
public RoleTO deprovision(final Long roleId, final Collection<String> resources) {
LOG.debug("About to deprovision role({}) from resources {}", roleId, resources);
final SyncopeRole role = binder.getRoleFromId(roleId);
final Set<String> noPropResourceName = role.getResourceNames();
noPropResourceName.removeAll(resources);
final List<PropagationTask> tasks = propagationManager.getRoleDeleteTaskIds(roleId, noPropResourceName);
PropagationReporter propagationReporter = ApplicationContextProvider.getApplicationContext().getBean(
PropagationReporter.class);
try {
taskExecutor.execute(tasks, propagationReporter);
} catch (PropagationException e) {
LOG.error("Error propagation primary resource", e);
propagationReporter.onPrimaryResourceFailure(tasks);
}
final RoleTO updatedTO = binder.getRoleTO(role);
updatedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
auditManager.audit(Category.user, AuditElements.RoleSubCategory.update, Result.success,
"Successfully deprovisioned role: " + updatedTO.getName());
LOG.debug("About to return updated role\n{}", updatedTO);
return updatedTO;
}
=======
/**
* {@inheritDoc}
*/
@Override
protected RoleTO resolveReference(final Method method, final Object... args) throws UnresolvedReferenceException {
Long id = null;
if (ArrayUtils.isNotEmpty(args)) {
for (int i = 0; id == null && i < args.length; i++) {
if (args[i] instanceof Long) {
id = (Long) args[i];
} else if (args[i] instanceof RoleTO) {
id = ((RoleTO) args[i]).getId();
} else if (args[i] instanceof RoleMod) {
id = ((RoleMod) args[i]).getId();
}
}
}
if (id != null) {
try {
return binder.getRoleTO(id);
} catch (Throwable ignore) {
LOG.debug("Unresolved reference", ignore);
throw new UnresolvedReferenceException(ignore);
}
}
throw new UnresolvedReferenceException();
}
>>>>>>>
@PreAuthorize("hasRole('ROLE_UPDATE')")
@Transactional(rollbackFor = {Throwable.class})
@Override
public RoleTO unlink(final Long roleId, final Collection<String> resources) {
LOG.debug("About to unlink role({}) and resources {}", roleId, resources);
final RoleMod roleMod = new RoleMod();
roleMod.setId(roleId);
roleMod.getResourcesToRemove().addAll(resources);
final WorkflowResult<Long> updated = rwfAdapter.update(roleMod);
final RoleTO updatedTO = binder.getRoleTO(updated.getResult());
LOG.debug("About to return updated role\n{}", updatedTO);
return updatedTO;
}
@PreAuthorize("hasRole('ROLE_UPDATE')")
@Transactional(rollbackFor = {Throwable.class})
@Override
public RoleTO unassign(final Long roleId, final Collection<String> resources) {
LOG.debug("About to unassign role({}) and resources {}", roleId, resources);
final RoleMod roleMod = new RoleMod();
roleMod.setId(roleId);
roleMod.getResourcesToRemove().addAll(resources);
return update(roleMod);
}
@PreAuthorize("hasRole('ROLE_UPDATE')")
@Transactional(rollbackFor = {Throwable.class})
@Override
public RoleTO deprovision(final Long roleId, final Collection<String> resources) {
LOG.debug("About to deprovision role({}) from resources {}", roleId, resources);
final SyncopeRole role = binder.getRoleFromId(roleId);
final Set<String> noPropResourceName = role.getResourceNames();
noPropResourceName.removeAll(resources);
final List<PropagationTask> tasks = propagationManager.getRoleDeleteTaskIds(roleId, noPropResourceName);
PropagationReporter propagationReporter = ApplicationContextProvider.getApplicationContext().getBean(
PropagationReporter.class);
try {
taskExecutor.execute(tasks, propagationReporter);
} catch (PropagationException e) {
LOG.error("Error propagation primary resource", e);
propagationReporter.onPrimaryResourceFailure(tasks);
}
final RoleTO updatedTO = binder.getRoleTO(role);
updatedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
LOG.debug("About to return updated role\n{}", updatedTO);
return updatedTO;
}
/**
* {@inheritDoc}
*/
@Override
protected RoleTO resolveReference(final Method method, final Object... args) throws UnresolvedReferenceException {
Long id = null;
if (ArrayUtils.isNotEmpty(args)) {
for (int i = 0; id == null && i < args.length; i++) {
if (args[i] instanceof Long) {
id = (Long) args[i];
} else if (args[i] instanceof RoleTO) {
id = ((RoleTO) args[i]).getId();
} else if (args[i] instanceof RoleMod) {
id = ((RoleMod) args[i]).getId();
}
}
}
if (id != null) {
try {
return binder.getRoleTO(id);
} catch (Throwable ignore) {
LOG.debug("Unresolved reference", ignore);
throw new UnresolvedReferenceException(ignore);
}
}
throw new UnresolvedReferenceException();
} |
<<<<<<<
import java.util.Set;
=======
import java.util.Locale;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.ArrayUtils;
>>>>>>>
import java.util.Set;
import org.apache.commons.lang3.ArrayUtils;
<<<<<<<
import org.springframework.stereotype.Component;
@Component
public class PolicyController extends AbstractController {
=======
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;
@Controller
@RequestMapping("/policy")
public class PolicyController extends AbstractTransactionalController<PolicyTO> {
>>>>>>>
import org.springframework.stereotype.Component;
@Component
public class PolicyController extends AbstractTransactionalController<AbstractPolicyTO> {
<<<<<<<
auditManager.audit(Category.policy, PolicySubCategory.list, Result.success,
"Successfully listed all policies (" + type + "): " + policyTOs.size());
=======
>>>>>>>
<<<<<<<
public Set<String> getSyncCorrelationRuleClasses() {
final Set<String> correlationRules =
classNamesLoader.getClassNames(ImplementationClassNamesLoader.Type.SYNC_CORRELATION_RULES);
=======
@RequestMapping(method = RequestMethod.GET, value = "/syncCorrelationRuleClasses")
public ModelAndView getSyncCorrelationRuleClasses() {
return new ModelAndView().addObject(
classNamesLoader.getClassNames(ImplementationClassNamesLoader.Type.SYNC_CORRELATION_RULES));
}
/**
* {@inheritDoc}
*/
@Override
protected PolicyTO resolveReference(final Method method, final Object... args) throws
UnresolvedReferenceException {
Long id = null;
if (ArrayUtils.isNotEmpty(args)) {
for (int i = 0; id == null && i < args.length; i++) {
if (args[i] instanceof Long) {
id = (Long) args[i];
} else if (args[i] instanceof PolicyTO) {
id = ((PolicyTO) args[i]).getId();
}
}
}
>>>>>>>
public Set<String> getSyncCorrelationRuleClasses() {
return classNamesLoader.getClassNames(ImplementationClassNamesLoader.Type.SYNC_CORRELATION_RULES);
}
/**
* {@inheritDoc}
*/
@Override
protected AbstractPolicyTO resolveReference(final Method method, final Object... args)
throws UnresolvedReferenceException {
Long id = null;
if (ArrayUtils.isNotEmpty(args)) {
for (int i = 0; id == null && i < args.length; i++) {
if (args[i] instanceof Long) {
id = (Long) args[i];
} else if (args[i] instanceof AbstractPolicyTO) {
id = ((AbstractPolicyTO) args[i]).getId();
}
}
}
<<<<<<<
return correlationRules;
=======
throw new UnresolvedReferenceException();
>>>>>>>
throw new UnresolvedReferenceException(); |
<<<<<<<
BookmarkablePageLink<Page> usersLink = new BookmarkablePageLink<Page>("users", Users.class);
MetaDataRoleAuthorizationStrategy.authorize(
usersLink, WebPage.ENABLE, xmlRolesReader.getAllAllowedRoles("Users", "list"));
=======
BookmarkablePageLink<Void> usersLink = new BookmarkablePageLink<Void>("users", Users.class);
String allowedUsersRoles = xmlRolesReader.getEntitlement("Users", "list");
MetaDataRoleAuthorizationStrategy.authorize(usersLink, WebPage.ENABLE, allowedUsersRoles);
>>>>>>>
BookmarkablePageLink<Page> usersLink = new BookmarkablePageLink<Page>("users", Users.class);
MetaDataRoleAuthorizationStrategy.authorize(
usersLink, WebPage.ENABLE, xmlRolesReader.getEntitlement("Users", "list"));
<<<<<<<
BookmarkablePageLink<Page> todoLink = new BookmarkablePageLink<Page>("todo", Todo.class);
MetaDataRoleAuthorizationStrategy.authorize(
todoLink, WebPage.ENABLE, xmlRolesReader.getAllAllowedRoles("Approval", "list"));
=======
BookmarkablePageLink<Void> todoLink = new BookmarkablePageLink<Void>("todo", Todo.class);
MetaDataRoleAuthorizationStrategy.authorize(todoLink, WebPage.ENABLE, xmlRolesReader.getEntitlement(
"Approval", "list"));
>>>>>>>
BookmarkablePageLink<Page> todoLink = new BookmarkablePageLink<Page>("todo", Todo.class);
MetaDataRoleAuthorizationStrategy.authorize(
todoLink, WebPage.ENABLE, xmlRolesReader.getEntitlement("Approval", "list"));
<<<<<<<
BookmarkablePageLink<Page> reportLink = new BookmarkablePageLink<Page>("reports", Reports.class);
MetaDataRoleAuthorizationStrategy.authorize(
reportLink, WebPage.ENABLE, xmlRolesReader.getAllAllowedRoles("Reports", "list"));
=======
BookmarkablePageLink<Void> reportLink = new BookmarkablePageLink<Void>("reports", Reports.class);
String allowedReportRoles = xmlRolesReader.getEntitlement("Reports", "list");
MetaDataRoleAuthorizationStrategy.authorize(reportLink, WebPage.ENABLE, allowedReportRoles);
>>>>>>>
BookmarkablePageLink<Page> reportLink = new BookmarkablePageLink<Page>("reports", Reports.class);
MetaDataRoleAuthorizationStrategy.authorize(
reportLink, WebPage.ENABLE, xmlRolesReader.getEntitlement("Reports", "list"));
<<<<<<<
MetaDataRoleAuthorizationStrategy.authorize(
configurationLink, WebPage.ENABLE, xmlRolesReader.getAllAllowedRoles("Configuration", "list"));
=======
String allowedConfigurationRoles = xmlRolesReader.getEntitlement("Configuration", "list");
MetaDataRoleAuthorizationStrategy.authorize(configurationLink, WebPage.ENABLE, allowedConfigurationRoles);
>>>>>>>
MetaDataRoleAuthorizationStrategy.authorize(
configurationLink, WebPage.ENABLE, xmlRolesReader.getEntitlement("Configuration", "list"));
<<<<<<<
BookmarkablePageLink<Page> taskLink = new BookmarkablePageLink<Page>("tasks", Tasks.class);
MetaDataRoleAuthorizationStrategy.authorize(
taskLink, WebPage.ENABLE, xmlRolesReader.getAllAllowedRoles("Tasks", "list"));
=======
BookmarkablePageLink<Void> taskLink = new BookmarkablePageLink<Void>("tasks", Tasks.class);
String allowedTasksRoles = xmlRolesReader.getEntitlement("Tasks", "list");
MetaDataRoleAuthorizationStrategy.authorize(taskLink, WebPage.ENABLE, allowedTasksRoles);
>>>>>>>
BookmarkablePageLink<Page> taskLink = new BookmarkablePageLink<Page>("tasks", Tasks.class);
MetaDataRoleAuthorizationStrategy.authorize(
taskLink, WebPage.ENABLE, xmlRolesReader.getEntitlement("Tasks", "list")); |
<<<<<<<
public ReportTO create(final ReportTO reportTO) {
LOG.debug("Creating report " + reportTO);
=======
public ReportTO createInternal(final ReportTO reportTO) {
>>>>>>>
public ReportTO create(final ReportTO reportTO) {
<<<<<<<
public ReportTO update(final ReportTO reportTO) {
LOG.debug("Report update called with parameter {}", reportTO);
=======
@RequestMapping(method = RequestMethod.POST, value = "/update")
public ReportTO update(@RequestBody final ReportTO reportTO) {
>>>>>>>
public ReportTO update(final ReportTO reportTO) { |
<<<<<<<
import org.apache.commons.lang3.StringUtils;
import org.apache.syncope.common.to.AbstractSchemaTO;
import org.apache.syncope.common.to.DerSchemaTO;
=======
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.ArrayUtils;
>>>>>>>
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.syncope.common.to.AbstractSchemaTO;
import org.apache.syncope.common.to.DerSchemaTO;
<<<<<<<
import org.apache.syncope.common.to.VirSchemaTO;
import org.apache.syncope.common.types.AttributableType;
import org.apache.syncope.common.types.AuditElements;
import org.apache.syncope.common.types.SchemaType;
import org.apache.syncope.common.types.ClientExceptionType;
=======
import org.apache.syncope.common.types.SyncopeClientExceptionType;
import org.apache.syncope.common.validation.SyncopeClientCompositeErrorException;
>>>>>>>
import org.apache.syncope.common.to.VirSchemaTO;
import org.apache.syncope.common.types.AttributableType;
import org.apache.syncope.common.types.ClientExceptionType;
import org.apache.syncope.common.types.SchemaType;
<<<<<<<
import org.apache.syncope.core.audit.AuditManager;
import org.apache.syncope.core.persistence.beans.AbstractDerSchema;
import org.apache.syncope.core.persistence.beans.AbstractNormalSchema;
import org.apache.syncope.core.persistence.beans.AbstractVirSchema;
import org.apache.syncope.core.persistence.dao.DerSchemaDAO;
=======
import org.apache.syncope.core.persistence.beans.AbstractSchema;
>>>>>>>
import org.apache.syncope.core.persistence.beans.AbstractDerSchema;
import org.apache.syncope.core.persistence.beans.AbstractNormalSchema;
import org.apache.syncope.core.persistence.beans.AbstractVirSchema;
import org.apache.syncope.core.persistence.dao.DerSchemaDAO;
<<<<<<<
import org.springframework.stereotype.Component;
@Component
public class SchemaController extends AbstractController {
@Autowired
private AuditManager auditManager;
=======
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
@Controller
@RequestMapping("/schema")
public class SchemaController extends AbstractTransactionalController<SchemaTO> {
>>>>>>>
import org.springframework.stereotype.Component;
@Component
public class SchemaController extends AbstractTransactionalController<SchemaTO> {
<<<<<<<
switch (schemaType) {
case VIRTUAL:
virSchemaDAO.delete(schemaName, attrUtil);
break;
case DERIVED:
derSchemaDAO.delete(schemaName, attrUtil);
break;
case NORMAL:
default:
schemaDAO.delete(schemaName, attrUtil);
}
auditManager.audit(AuditElements.Category.schema, AuditElements.SchemaSubCategory.delete,
AuditElements.Result.success,
"Successfully deleted schema: " + schemaType + "/" + attrType + "/" + schemaName);
=======
SchemaTO schemaToDelete = binder.getSchemaTO(schema);
schemaDAO.delete(schemaName, getAttributableUtil(kind));
return schemaToDelete;
>>>>>>>
switch (schemaType) {
case VIRTUAL:
virSchemaDAO.delete(schemaName, attrUtil);
break;
case DERIVED:
derSchemaDAO.delete(schemaName, attrUtil);
break;
case NORMAL:
default:
schemaDAO.delete(schemaName, attrUtil);
}
<<<<<<<
auditManager.audit(AuditElements.Category.schema, AuditElements.SchemaSubCategory.read,
AuditElements.Result.success,
"Successfully read schema: " + schemaType + "/" + attrType + "/" + schemaName);
return read;
=======
return binder.getSchemaTO(schema);
>>>>>>>
return read; |
<<<<<<<
UserWorkflowService userService2 =
clientFactory.create("rossini", ADMIN_PWD).getService(UserWorkflowService.class);
=======
UserWorkflowService userService2 = setupCredentials(
userWorkflowService, UserWorkflowService.class, "rossini", ADMIN_PWD);
>>>>>>>
UserWorkflowService userService2 = clientFactory.create(
"rossini", ADMIN_PWD).getService(UserWorkflowService.class);
<<<<<<<
UserWorkflowService userService3 =
clientFactory.create("bellini", ADMIN_PWD).getService(UserWorkflowService.class);
=======
UserWorkflowService userService3 = setupCredentials(userWorkflowService, UserWorkflowService.class, "bellini",
ADMIN_PWD);
>>>>>>>
UserWorkflowService userService3 = clientFactory.create(
"bellini", ADMIN_PWD).getService(UserWorkflowService.class);
<<<<<<<
ConnObjectTO userOnDb =
resourceService.getConnectorObject(RESOURCE_NAME_TESTDB, AttributableType.USER, userTO.getId());
final AttributeTO pwdOnTestDbAttr = userOnDb.getAttrMap().get(OperationalAttributes.PASSWORD_NAME);
=======
ConnObjectTO userOnDb = resourceService.getConnectorObject(
RESOURCE_NAME_TESTDB, AttributableType.USER, userTO.getId());
final AttributeTO pwdOnTestDbAttr = userOnDb.getAttributeMap().get(OperationalAttributes.PASSWORD_NAME);
>>>>>>>
ConnObjectTO userOnDb = resourceService.getConnectorObject(
RESOURCE_NAME_TESTDB, AttributableType.USER, userTO.getId());
final AttributeTO pwdOnTestDbAttr = userOnDb.getAttrMap().get(OperationalAttributes.PASSWORD_NAME);
<<<<<<<
ConnObjectTO userOnDb2 =
resourceService.getConnectorObject("resource-testdb2", AttributableType.USER, userTO.getId());
final AttributeTO pwdOnTestDb2Attr = userOnDb2.getAttrMap().get(OperationalAttributes.PASSWORD_NAME);
=======
ConnObjectTO userOnDb2 = resourceService.getConnectorObject(
"resource-testdb2", AttributableType.USER, userTO.getId());
final AttributeTO pwdOnTestDb2Attr = userOnDb2.getAttributeMap().get(OperationalAttributes.PASSWORD_NAME);
>>>>>>>
ConnObjectTO userOnDb2 = resourceService.getConnectorObject(
"resource-testdb2", AttributableType.USER, userTO.getId());
final AttributeTO pwdOnTestDb2Attr = userOnDb2.getAttrMap().get(OperationalAttributes.PASSWORD_NAME);
<<<<<<<
@Test
public void unlink() {
UserTO userTO = getUniqueSampleTO("[email protected]");
userTO.getResources().clear();
userTO.getMemberships().clear();
userTO.getDerAttrs().clear();
userTO.getVirAttrs().clear();
userTO.getDerAttrs().add(attributeTO("csvuserid", null));
userTO.getResources().add(RESOURCE_NAME_CSV);
UserTO actual = createUser(userTO);
assertNotNull(actual);
ConnObjectTO connObjectTO = readConnectorObject(RESOURCE_NAME_CSV, actual.getId());
assertNotNull(connObjectTO);
PropagationTargetsTO res = new PropagationTargetsTO();
res.getResources().add(RESOURCE_NAME_CSV);
actual = userService.unlink(actual.getId(), res);
assertNotNull(actual);
assertTrue(actual.getResources().isEmpty());
actual = userService.read(actual.getId());
assertNotNull(actual);
assertTrue(actual.getResources().isEmpty());
connObjectTO = readConnectorObject(RESOURCE_NAME_CSV, actual.getId());
assertNotNull(connObjectTO);
=======
@Test
public void issueSYNCOPE426() {
UserTO userTO = getUniqueSampleTO("[email protected]");
userTO = createUser(userTO);
assertNotNull(userTO);
UserMod userMod = new UserMod();
userMod.setPassword("anotherPassword123");
userTO = userService.update(userTO.getId(), userMod);
assertNotNull(userTO);
}
private boolean getBooleanAttribute(ConnObjectTO connObjectTO, String attrName) {
return Boolean.parseBoolean(getStringAttribute(connObjectTO, attrName));
>>>>>>>
@Test
public void unlink() {
UserTO userTO = getUniqueSampleTO("[email protected]");
userTO.getResources().clear();
userTO.getMemberships().clear();
userTO.getDerAttrs().clear();
userTO.getVirAttrs().clear();
userTO.getDerAttrs().add(attributeTO("csvuserid", null));
userTO.getResources().add(RESOURCE_NAME_CSV);
UserTO actual = createUser(userTO);
assertNotNull(actual);
ConnObjectTO connObjectTO = readConnectorObject(RESOURCE_NAME_CSV, actual.getId());
assertNotNull(connObjectTO);
PropagationTargetsTO res = new PropagationTargetsTO();
res.getResources().add(RESOURCE_NAME_CSV);
actual = userService.unlink(actual.getId(), res);
assertNotNull(actual);
assertTrue(actual.getResources().isEmpty());
actual = userService.read(actual.getId());
assertNotNull(actual);
assertTrue(actual.getResources().isEmpty());
connObjectTO = readConnectorObject(RESOURCE_NAME_CSV, actual.getId());
assertNotNull(connObjectTO); |
<<<<<<<
import java.util.Set;
=======
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.ArrayUtils;
>>>>>>>
import java.util.Set;
import org.apache.commons.lang3.ArrayUtils;
<<<<<<<
import org.apache.syncope.common.to.AbstractTaskTO;
import org.apache.syncope.common.types.AuditElements.Category;
import org.apache.syncope.common.types.AuditElements.Result;
import org.apache.syncope.common.types.AuditElements.TaskSubCategory;
=======
import org.apache.syncope.common.to.TaskTO;
>>>>>>>
import org.apache.syncope.common.to.AbstractTaskTO;
<<<<<<<
import org.springframework.stereotype.Component;
@Component
public class TaskController extends AbstractController {
@Autowired
private AuditManager auditManager;
=======
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
@Controller
@RequestMapping("/task")
public class TaskController extends AbstractTransactionalController<TaskTO> {
>>>>>>>
import org.springframework.stereotype.Component;
@Component
public class TaskController extends AbstractTransactionalController<AbstractTaskTO> {
<<<<<<<
TaskUtil taskUtil = TaskUtil.getInstance(task);
auditManager.audit(Category.task, TaskSubCategory.read, Result.success,
"Successfully read task: " + task.getId() + "/" + taskUtil);
return binder.getTaskTO(task, taskUtil);
=======
return binder.getTaskTO(task, getTaskUtil(task));
>>>>>>>
return binder.getTaskTO(task, TaskUtil.getInstance(task));
}
@PreAuthorize("hasRole('TASK_READ')")
public TaskExecTO readExecution(final Long executionId) {
TaskExec taskExec = taskExecDAO.find(executionId);
if (taskExec == null) {
throw new NotFoundException("Task execution " + executionId);
}
return binder.getTaskExecTO(taskExec);
}
@PreAuthorize("hasRole('TASK_EXECUTE')")
public TaskExecTO execute(final Long taskId, final boolean dryRun) {
Task task = taskDAO.find(taskId);
if (task == null) {
throw new NotFoundException("Task " + taskId);
}
TaskUtil taskUtil = TaskUtil.getInstance(task);
TaskExecTO result = null;
LOG.debug("Execution started for {}", task);
switch (taskUtil.getType()) {
case PROPAGATION:
final TaskExec propExec = taskExecutor.execute((PropagationTask) task);
result = binder.getTaskExecTO(propExec);
break;
case NOTIFICATION:
final TaskExec notExec = notificationJob.executeSingle((NotificationTask) task);
result = binder.getTaskExecTO(notExec);
break;
case SCHEDULED:
case SYNCHRONIZATION:
try {
jobInstanceLoader.registerJob(task,
((SchedTask) task).getJobClassName(),
((SchedTask) task).getCronExpression());
JobDataMap map = new JobDataMap();
map.put(AbstractTaskJob.DRY_RUN_JOBDETAIL_KEY, dryRun);
scheduler.getScheduler().triggerJob(
new JobKey(JobInstanceLoader.getJobName(task), Scheduler.DEFAULT_GROUP), map);
} catch (Exception e) {
LOG.error("While executing task {}", task, e);
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
sce.getElements().add(e.getMessage());
throw sce;
}
result = new TaskExecTO();
result.setTask(taskId);
result.setStartDate(new Date());
result.setStatus("JOB_FIRED");
result.setMessage("Job fired; waiting for results...");
break;
default:
}
LOG.debug("Execution finished for {}, {}", task, result);
return result;
<<<<<<<
auditManager.audit(Category.task, TaskSubCategory.execute, Result.failure,
"Could not start execution for task: " + task.getId() + "/" + taskUtil, e);
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
sce.getElements().add(e.getMessage());
throw sce;
=======
SyncopeClientCompositeErrorException scce = new SyncopeClientCompositeErrorException(
HttpStatus.BAD_REQUEST);
SyncopeClientException sce = new SyncopeClientException(SyncopeClientExceptionType.Scheduling);
sce.addElement(e.getMessage());
scce.addException(sce);
throw scce;
>>>>>>>
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
sce.getElements().add(e.getMessage());
throw sce;
<<<<<<<
auditManager.audit(Category.task, TaskSubCategory.report, Result.failure,
"Could not reported execution status: " + exec.getId() + "/" + taskUtil, sce);
throw sce;
=======
SyncopeClientCompositeErrorException scce =
new SyncopeClientCompositeErrorException(HttpStatus.BAD_REQUEST);
scce.addException(sce);
throw scce;
>>>>>>>
throw sce; |
<<<<<<<
public <T extends SchedTaskTO> T createSchedTask(final T taskTO) {
LOG.debug("Creating task " + taskTO);
TaskUtil taskUtil = TaskUtil.getInstance(taskTO);
=======
public TaskTO createSchedTaskInternal(final SchedTaskTO taskTO) {
TaskUtil taskUtil = getTaskUtil(taskTO);
>>>>>>>
public <T extends SchedTaskTO> T createSchedTask(final T taskTO) {
TaskUtil taskUtil = TaskUtil.getInstance(taskTO);
<<<<<<<
public <T extends SchedTaskTO> T updateSched(final SchedTaskTO taskTO) {
LOG.debug("Task update called with parameter {}", taskTO);
=======
@RequestMapping(method = RequestMethod.POST, value = "/update/sched")
public TaskTO updateSched(@RequestBody final SchedTaskTO taskTO) {
>>>>>>>
public <T extends SchedTaskTO> T updateSched(final SchedTaskTO taskTO) {
<<<<<<<
LOG.debug("Execution started for {}", task);
switch (taskUtil.getType()) {
=======
switch (taskUtil) {
>>>>>>>
switch (taskUtil.getType()) {
<<<<<<<
public BulkActionRes bulk(final BulkAction bulkAction) {
LOG.debug("Bulk '{}' called on '{}'", bulkAction.getOperation(), bulkAction.getTargets());
=======
@RequestMapping(method = RequestMethod.POST, value = "/bulk")
public BulkActionRes bulkAction(@RequestBody final BulkAction bulkAction) {
>>>>>>>
public BulkActionRes bulk(final BulkAction bulkAction) { |
<<<<<<<
=======
this.resourceTO = resourceTO;
connConfProperties = getConnConfProperties();
connConfPropContainer = new WebMarkupContainer("connectorPropertiesContainer");
connConfPropContainer.setOutputMarkupId(true);
add(connConfPropContainer);
/*
* the list of overridable connector properties
*/
final ListView<ConnConfProperty> connPropView = new ConnConfPropertyListView("connectorProperties",
new PropertyModel<List<ConnConfProperty>>(this, "connConfProperties"),
false, resourceTO.getConfOverride());
connPropView.setOutputMarkupId(true);
connConfPropContainer.add(connPropView);
check = new IndicatingAjaxButton("check", new ResourceModel("check")) {
>>>>>>> |
<<<<<<<
import javax.ws.rs.Consumes;
=======
>>>>>>>
import javax.ws.rs.Consumes;
<<<<<<<
@Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
void importDefinition(@PathParam("kind") AttributableType kind, String definition);
/**
* @param kind Kind can be USER or ROLE only!
* @return Returns existing tasks for matching kind.
*/
@GET
@Path("tasks")
@Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
WorkflowTasks getDefinedTasks(@PathParam("kind") AttributableType kind);
=======
void updateDefinition(@PathParam("kind") AttributableType kind, WorkflowDefinitionTO definition);
>>>>>>>
@Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
void importDefinition(@PathParam("kind") AttributableType kind, String definition); |
<<<<<<<
final String schemaName = virAttr.getSchema().getName();
final List<String> values = virAttrCache.get(attrUtil.getType(), owner.getId(), schemaName);
=======
final String schemaName = virAttr.getVirtualSchema().getName();
final VirAttrCacheValue virAttrCacheValue = virAttrCache.get(attrUtil.getType(), owner.getId(), schemaName);
>>>>>>>
final String schemaName = virAttr.getSchema().getName();
final VirAttrCacheValue virAttrCacheValue = virAttrCache.get(attrUtil.getType(), owner.getId(), schemaName); |
<<<<<<<
selenium.click("//td[6]/div/span[12]/a");
=======
selenium.click("//td[6]/div/span[9]/a");
selenium.waitForCondition("selenium.isElementPresent(" + "\"class=wicket_modal\");", "30000");
selenium.waitForFrameToLoad("class=wicket_modal", "30000");
selenium.selectFrame("index=0");
>>>>>>>
selenium.click("//td[6]/div/span[12]/a");
selenium.waitForCondition("selenium.isElementPresent(" + "\"class=wicket_modal\");", "30000");
selenium.waitForFrameToLoad("class=wicket_modal", "30000");
selenium.selectFrame("index=0"); |
<<<<<<<
import java.security.AccessControlException;
=======
import java.lang.reflect.Method;
import java.util.AbstractMap.SimpleEntry;
>>>>>>>
import java.lang.reflect.Method;
import java.security.AccessControlException;
<<<<<<<
import java.util.Collection;
import java.util.Collections;
=======
>>>>>>>
import java.util.Collection;
<<<<<<<
import org.apache.syncope.common.mod.StatusMod;
=======
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.ArrayUtils;
>>>>>>>
import org.apache.syncope.common.mod.StatusMod;
import org.apache.commons.lang3.ArrayUtils;
<<<<<<<
import org.apache.syncope.common.types.AuditElements;
import org.apache.syncope.common.types.AuditElements.Category;
import org.apache.syncope.common.types.AuditElements.Result;
import org.apache.syncope.common.types.AuditElements.UserSubCategory;
import org.apache.syncope.common.types.ClientExceptionType;
=======
import org.apache.syncope.common.types.ResourceOperation;
import org.apache.syncope.common.types.SyncopeClientExceptionType;
import org.apache.syncope.common.validation.SyncopeClientCompositeErrorException;
>>>>>>>
import org.apache.syncope.common.types.ClientExceptionType;
<<<<<<<
@Component
public class UserController extends AbstractResourceAssociator<UserTO> {
/**
* Logger.
*/
protected static final Logger LOG = LoggerFactory.getLogger(UserController.class);
@Autowired
protected AuditManager auditManager;
=======
@Controller
@RequestMapping("/user")
public class UserController extends AbstractController<UserTO> {
>>>>>>>
@Component
public class UserController extends AbstractResourceAssociator<UserTO> {
/**
* Logger.
*/
protected static final Logger LOG = LoggerFactory.getLogger(UserController.class);
<<<<<<<
List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(
user, statusMod.getType() != StatusMod.ModType.SUSPEND, resourcesToBeExcluded);
taskExecutor.execute(tasks);
notificationManager.createTasks(updated.getResult(), updated.getPerformedTasks());
=======
List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(user, status, resourcesToBeExcluded);
PropagationReporter propReporter =
ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
try {
taskExecutor.execute(tasks, propReporter);
} catch (PropagationException e) {
LOG.error("Error propagation primary resource", e);
propReporter.onPrimaryResourceFailure(tasks);
}
>>>>>>>
List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(
user, statusMod.getType() != StatusMod.ModType.SUSPEND, resourcesToBeExcluded);
PropagationReporter propReporter =
ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
try {
taskExecutor.execute(tasks, propReporter);
} catch (PropagationException e) {
LOG.error("Error propagation primary resource", e);
propReporter.onPrimaryResourceFailure(tasks);
}
<<<<<<<
auditManager.audit(Category.user, UserSubCategory.delete, Result.failure,
"Could not delete user: " + userId + " because of role(s) ownership " + owned);
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.RoleOwnership);
sce.getElements().addAll(owned);
throw sce;
=======
SyncopeClientCompositeErrorException sccee =
new SyncopeClientCompositeErrorException(HttpStatus.BAD_REQUEST);
SyncopeClientException sce = new SyncopeClientException(SyncopeClientExceptionType.RoleOwnership);
sce.setElements(owned);
sccee.addException(sce);
throw sccee;
>>>>>>>
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.RoleOwnership);
sce.getElements().addAll(owned);
throw sce;
<<<<<<<
final UserTO deletedTO;
SyncopeUser deleted = userDAO.find(userId);
if (deleted == null) {
deletedTO = new UserTO();
deletedTO.setId(userId);
} else {
deletedTO = binder.getUserTO(userId);
}
deletedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
auditManager.audit(Category.user, UserSubCategory.delete, Result.success,
"Successfully deleted user: " + userId);
=======
>>>>>>>
final UserTO deletedTO;
SyncopeUser deleted = userDAO.find(userId);
if (deleted == null) {
deletedTO = new UserTO();
deletedTO.setId(userId);
} else {
deletedTO = binder.getUserTO(userId);
}
deletedTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
<<<<<<<
@PreAuthorize("hasRole('USER_UPDATE')")
@Transactional(rollbackFor = { Throwable.class })
@Override
public UserTO unlink(final Long userId, final Collection<String> resources) {
LOG.debug("About to unlink user({}) and resources {}", userId, resources);
final UserMod userMod = new UserMod();
userMod.setId(userId);
userMod.getResourcesToRemove().addAll(resources);
WorkflowResult<Map.Entry<UserMod, Boolean>> updated = uwfAdapter.update(userMod);
final UserTO updatedTO = binder.getUserTO(updated.getResult().getKey().getId());
auditManager.audit(Category.user, UserSubCategory.update, Result.success,
"Successfully updated user: " + updatedTO.getUsername());
LOG.debug("About to return updated user\n{}", updatedTO);
return updatedTO;
}
@PreAuthorize("hasRole('USER_UPDATE')")
@Transactional(rollbackFor = { Throwable.class })
@Override
public UserTO unassign(final Long userId, final Collection<String> resources) {
LOG.debug("About to unassign user({}) and resources {}", userId, resources);
final UserMod userMod = new UserMod();
userMod.setId(userId);
userMod.getResourcesToRemove().addAll(resources);
return update(userMod);
}
@PreAuthorize("hasRole('USER_UPDATE')")
@Transactional(rollbackFor = { Throwable.class })
@Override
public UserTO deprovision(final Long userId, final Collection<String> resources) {
LOG.debug("About to deprovision user({}) from resources {}", userId, resources);
final SyncopeUser user = binder.getUserFromId(userId);
final Set<String> noPropResourceName = user.getResourceNames();
noPropResourceName.removeAll(resources);
final List<PropagationTask> tasks = propagationManager.getUserDeleteTaskIds(userId, noPropResourceName);
PropagationReporter propagationReporter = ApplicationContextProvider.getApplicationContext().
getBean(PropagationReporter.class);
try {
taskExecutor.execute(tasks, propagationReporter);
} catch (PropagationException e) {
LOG.error("Error propagation primary resource", e);
propagationReporter.onPrimaryResourceFailure(tasks);
}
final UserTO updatedUserTO = binder.getUserTO(user);
updatedUserTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
auditManager.audit(Category.user, UserSubCategory.update, Result.success,
"Successfully deprovisioned user: " + updatedUserTO.getUsername());
LOG.debug("About to return updated user\n{}", updatedUserTO);
return updatedUserTO;
}
=======
/**
* {@inheritDoc}
*/
@Override
protected UserTO resolveReference(final Method method, final Object... args) throws UnresolvedReferenceException {
Object id = null;
if (ArrayUtils.isNotEmpty(args) && !"claimForm".equals(method.getName())) {
for (int i = 0; id == null && i < args.length; i++) {
if (args[i] instanceof Long) {
id = (Long) args[i];
} else if (args[i] instanceof String) {
id = (String) args[i];
} else if (args[i] instanceof UserTO) {
id = ((UserTO) args[i]).getId();
} else if (args[i] instanceof UserMod) {
id = ((UserMod) args[i]).getId();
}
}
}
if (id != null) {
try {
return id instanceof Long ? binder.getUserTO((Long) id) : binder.getUserTO((String) id);
} catch (Throwable ignore) {
LOG.debug("Unresolved reference", ignore);
throw new UnresolvedReferenceException(ignore);
}
}
throw new UnresolvedReferenceException();
}
>>>>>>>
@PreAuthorize("hasRole('USER_UPDATE')")
@Transactional(rollbackFor = {Throwable.class})
@Override
public UserTO unlink(final Long userId, final Collection<String> resources) {
LOG.debug("About to unlink user({}) and resources {}", userId, resources);
final UserMod userMod = new UserMod();
userMod.setId(userId);
userMod.getResourcesToRemove().addAll(resources);
WorkflowResult<Map.Entry<UserMod, Boolean>> updated = uwfAdapter.update(userMod);
final UserTO updatedTO = binder.getUserTO(updated.getResult().getKey().getId());
LOG.debug("About to return updated user\n{}", updatedTO);
return updatedTO;
}
@PreAuthorize("hasRole('USER_UPDATE')")
@Transactional(rollbackFor = {Throwable.class})
@Override
public UserTO unassign(final Long userId, final Collection<String> resources) {
LOG.debug("About to unassign user({}) and resources {}", userId, resources);
final UserMod userMod = new UserMod();
userMod.setId(userId);
userMod.getResourcesToRemove().addAll(resources);
return update(userMod);
}
@PreAuthorize("hasRole('USER_UPDATE')")
@Transactional(rollbackFor = {Throwable.class})
@Override
public UserTO deprovision(final Long userId, final Collection<String> resources) {
LOG.debug("About to deprovision user({}) from resources {}", userId, resources);
final SyncopeUser user = binder.getUserFromId(userId);
final Set<String> noPropResourceName = user.getResourceNames();
noPropResourceName.removeAll(resources);
final List<PropagationTask> tasks = propagationManager.getUserDeleteTaskIds(userId, noPropResourceName);
PropagationReporter propagationReporter = ApplicationContextProvider.getApplicationContext().
getBean(PropagationReporter.class);
try {
taskExecutor.execute(tasks, propagationReporter);
} catch (PropagationException e) {
LOG.error("Error propagation primary resource", e);
propagationReporter.onPrimaryResourceFailure(tasks);
}
final UserTO updatedUserTO = binder.getUserTO(user);
updatedUserTO.getPropagationStatusTOs().addAll(propagationReporter.getStatuses());
LOG.debug("About to return updated user\n{}", updatedUserTO);
return updatedUserTO;
}
/**
* {@inheritDoc}
*/
@Override
protected UserTO resolveReference(final Method method, final Object... args) throws UnresolvedReferenceException {
Object id = null;
if (ArrayUtils.isNotEmpty(args)) {
for (int i = 0; id == null && i < args.length; i++) {
if (args[i] instanceof Long) {
id = (Long) args[i];
} else if (args[i] instanceof String) {
id = (String) args[i];
} else if (args[i] instanceof UserTO) {
id = ((UserTO) args[i]).getId();
} else if (args[i] instanceof UserMod) {
id = ((UserMod) args[i]).getId();
}
}
}
if (id != null) {
try {
return id instanceof Long ? binder.getUserTO((Long) id) : binder.getUserTO((String) id);
} catch (Throwable ignore) {
LOG.debug("Unresolved reference", ignore);
throw new UnresolvedReferenceException(ignore);
}
}
throw new UnresolvedReferenceException();
} |
<<<<<<<
=======
import org.apache.commons.lang3.ArrayUtils;
import org.apache.syncope.common.SyncopeConstants;
>>>>>>>
import org.apache.commons.lang3.ArrayUtils;
<<<<<<<
@Component
public class ReportController extends AbstractController {
@Autowired
private AuditManager auditManager;
=======
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
@Controller
@RequestMapping("/report")
public class ReportController extends AbstractTransactionalController<ReportTO> {
>>>>>>>
@Component
public class ReportController extends AbstractTransactionalController<ReportTO> {
<<<<<<<
auditManager.audit(Category.report, ReportSubCategory.getReportletConfClasses, Result.success,
"Successfully listed all ReportletConf classes: " + reportletConfClasses.size());
=======
>>>>>>>
<<<<<<<
auditManager.audit(Category.report, ReportSubCategory.execute, Result.failure,
"Could not start execution for report: " + report.getId(), e);
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
sce.getElements().add(e.getMessage());
throw sce;
=======
SyncopeClientCompositeErrorException scce =
new SyncopeClientCompositeErrorException(HttpStatus.BAD_REQUEST);
SyncopeClientException sce = new SyncopeClientException(SyncopeClientExceptionType.Scheduling);
sce.addElement(e.getMessage());
scce.addException(sce);
throw scce;
>>>>>>>
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling);
sce.getElements().add(e.getMessage());
throw sce; |
<<<<<<<
=======
SyncopeClient anonymousClient = clientFactory.setUseCompression(true).create(anonymousUser, anonymousKey);
>>>>>>>
<<<<<<<
client = SyncopeConsoleApplication.get().getClientFactory().
setDomain(getDomain()).create(username, password);
=======
client = clientFactory.setDomain(getDomain()).setUseCompression(true).create(username, password);
>>>>>>>
client = SyncopeConsoleApplication.get().getClientFactory().
setDomain(getDomain()).create(username, password); |
<<<<<<<
=======
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.ArrayUtils;
>>>>>>>
import org.apache.commons.lang3.ArrayUtils;
<<<<<<<
import org.springframework.stereotype.Component;
@Component
public class NotificationController extends AbstractController {
@Autowired
private AuditManager auditManager;
=======
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
@Controller
@RequestMapping("/notification")
public class NotificationController extends AbstractTransactionalController<NotificationTO> {
>>>>>>>
import org.springframework.stereotype.Component;
@Component
public class NotificationController extends AbstractTransactionalController<NotificationTO> {
<<<<<<<
Notification notification = notificationDAO.save(binder.createNotification(notificationTO));
auditManager.audit(Category.notification, NotificationSubCategory.create, Result.success,
"Successfully created notification: " + notification.getId());
return binder.getNotificationTO(notification);
=======
return binder.getNotificationTO(notificationDAO.save(binder.createNotification(notificationTO)));
>>>>>>>
return binder.getNotificationTO(notificationDAO.save(binder.createNotification(notificationTO))); |
<<<<<<<
import org.apache.syncope.common.to.AbstractPolicyTO;
=======
import org.apache.syncope.common.to.PolicyTO;
import org.apache.syncope.common.to.RoleTO;
>>>>>>>
import org.apache.syncope.common.to.AbstractPolicyTO;
import org.apache.syncope.common.to.RoleTO;
<<<<<<<
private void setPolicySpecification(final AbstractPolicyTO policyTO, final AbstractPolicySpec specification) {
=======
private void setPolicySpecification(final PolicyTO policyTO, final AbstractPolicySpec specification) {
>>>>>>>
private void setPolicySpecification(final AbstractPolicyTO policyTO, final AbstractPolicySpec specification) { |
<<<<<<<
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<RoleTO> search(final NodeCond searchCondition)
=======
@RequestMapping(method = RequestMethod.POST, value = "/search")
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public List<RoleTO> search(@RequestBody final NodeCond searchCondition)
>>>>>>>
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public List<RoleTO> search(final NodeCond searchCondition)
<<<<<<<
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public List<RoleTO> search(final NodeCond searchCondition, final int page, final int size)
=======
@RequestMapping(method = RequestMethod.POST, value = "/search/{page}/{size}")
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public List<RoleTO> search(@RequestBody final NodeCond searchCondition, @PathVariable("page") final int page,
@PathVariable("size") final int size)
>>>>>>>
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public List<RoleTO> search(final NodeCond searchCondition, final int page, final int size)
<<<<<<<
@Transactional(readOnly = true, rollbackFor = { Throwable.class })
public int searchCount(final NodeCond searchCondition)
=======
@RequestMapping(method = RequestMethod.POST, value = "/search/count")
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public ModelAndView searchCount(@RequestBody final NodeCond searchCondition)
>>>>>>>
@Transactional(readOnly = true, rollbackFor = {Throwable.class})
public int searchCount(final NodeCond searchCondition)
<<<<<<<
public RoleTO delete(final Long roleId) {
List<SyncopeRole> ownedRoles = roleDAO.findOwnedByRole(roleId);
if (!ownedRoles.isEmpty()) {
List<String> owned = new ArrayList<String>(ownedRoles.size());
for (SyncopeRole role : ownedRoles) {
owned.add(role.getId() + " " + role.getName());
}
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.RoleOwnership);
sce.getElements().addAll(owned);
throw sce;
}
// Generate propagation tasks for deleting users from role resources, if they are on those resources only
// because of the reason being deleted (see SYNCOPE-357)
List<PropagationTask> tasks = new ArrayList<PropagationTask>();
for (WorkflowResult<Long> wfResult : binder.getUsersOnResourcesOnlyBecauseOfRole(roleId)) {
tasks.addAll(propagationManager.getUserDeleteTaskIds(wfResult));
=======
@RequestMapping(method = RequestMethod.GET, value = "/delete/{roleId}")
public RoleTO delete(@PathVariable("roleId") final Long roleId) {
final List<SyncopeRole> toBeDeprovisioned = new ArrayList<SyncopeRole>();
final SyncopeRole syncopeRole = roleDAO.find(roleId);
if (syncopeRole != null) {
toBeDeprovisioned.add(syncopeRole);
final List<SyncopeRole> descendants = roleDAO.findDescendants(toBeDeprovisioned.get(0));
if (descendants != null) {
toBeDeprovisioned.addAll(descendants);
}
>>>>>>>
public RoleTO delete(final Long roleId) {
List<SyncopeRole> ownedRoles = roleDAO.findOwnedByRole(roleId);
if (!ownedRoles.isEmpty()) {
List<String> owned = new ArrayList<String>(ownedRoles.size());
for (SyncopeRole role : ownedRoles) {
owned.add(role.getId() + " " + role.getName());
}
SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.RoleOwnership);
sce.getElements().addAll(owned);
throw sce;
}
final List<SyncopeRole> toBeDeprovisioned = new ArrayList<SyncopeRole>();
final SyncopeRole syncopeRole = roleDAO.find(roleId);
if (syncopeRole != null) {
toBeDeprovisioned.add(syncopeRole);
final List<SyncopeRole> descendants = roleDAO.findDescendants(toBeDeprovisioned.get(0));
if (descendants != null) {
toBeDeprovisioned.addAll(descendants);
} |
<<<<<<<
import com.uber.jaeger.Span;
=======
>>>>>>>
import com.uber.jaeger.Span; |
<<<<<<<
private static List<Runnable> onShutdownActions = new LinkedList<Runnable>();
private static Metric2Registry metric2Registry;
=======
private static List<Runnable> onShutdownActions = new CopyOnWriteArrayList<Runnable>();
>>>>>>>
private static List<Runnable> onShutdownActions = new CopyOnWriteArrayList<Runnable>();
private static Metric2Registry metric2Registry;
<<<<<<<
SharedMetricRegistries.clear();
metric2Registry = new Metric2Registry();
=======
>>>>>>>
metric2Registry = new Metric2Registry(); |
<<<<<<<
import org.stagemonitor.core.util.CompletedFuture;
=======
import org.stagemonitor.core.util.ExecutorUtils;
>>>>>>>
import org.stagemonitor.core.util.CompletedFuture;
import org.stagemonitor.core.util.ExecutorUtils; |
<<<<<<<
import com.uber.jaeger.Span;
import com.uber.jaeger.Tracer;
import com.uber.jaeger.Tracer.Builder;
import com.uber.jaeger.context.TracingUtils;
import com.uber.jaeger.reporters.LoggingReporter;
import com.uber.jaeger.samplers.ConstSampler;
=======
>>>>>>>
import com.uber.jaeger.Span;
import com.uber.jaeger.Tracer;
import com.uber.jaeger.Tracer.Builder;
import com.uber.jaeger.context.TracingUtils;
import com.uber.jaeger.reporters.LoggingReporter;
import com.uber.jaeger.samplers.ConstSampler; |
<<<<<<<
=======
private static IngressWatcher createIngressWatcher(String ns, String initialResourceVersion) {
return IngressWatcher.create(
threadFactory,
ns,
initialResourceVersion,
processor::dispatchIngressWatch,
isNamespaceStopping(ns));
}
private static DomainWatcher createDomainWatcher(String ns, String initialResourceVersion) {
return DomainWatcher.create(
threadFactory,
ns,
initialResourceVersion,
processor::dispatchDomainWatch,
isNamespaceStopping(ns));
}
>>>>>>>
private static DomainWatcher createDomainWatcher(String ns, String initialResourceVersion) {
return DomainWatcher.create(
threadFactory,
ns,
initialResourceVersion,
processor::dispatchDomainWatch,
isNamespaceStopping(ns));
}
<<<<<<<
=======
private static class IngressListStep extends ResponseStep<V1beta1IngressList> {
private final String ns;
IngressListStep(String ns) {
this.ns = ns;
}
@Override
public NextAction onFailure(Packet packet, CallResponse<V1beta1IngressList> callResponse) {
return callResponse.getStatusCode() == CallBuilder.NOT_FOUND
? onSuccess(packet, callResponse)
: super.onFailure(packet, callResponse);
}
@Override
public NextAction onSuccess(Packet packet, CallResponse<V1beta1IngressList> callResponse) {
@SuppressWarnings("unchecked")
Map<String, DomainPresenceInfo> dpis = (Map<String, DomainPresenceInfo>) packet.get(DPI_MAP);
V1beta1IngressList result = callResponse.getResult();
if (result != null) {
for (V1beta1Ingress ingress : result.getItems()) {
String domainUID = IngressWatcher.getIngressDomainUID(ingress);
String clusterName = IngressWatcher.getIngressClusterName(ingress);
if (domainUID != null && clusterName != null) {
dpis.computeIfAbsent(domainUID, k -> new DomainPresenceInfo(ns, domainUID))
.getIngresses()
.put(clusterName, ingress);
}
}
}
if (!ingressWatchers.containsKey(ns)) {
ingressWatchers.put(ns, createIngressWatcher(ns, getInitialResourceVersion(result)));
}
return doNext(packet);
}
private String getInitialResourceVersion(V1beta1IngressList result) {
return result != null ? result.getMetadata().getResourceVersion() : "";
}
}
>>>>>>> |
<<<<<<<
import io.kubernetes.client.models.V1PersistentVolume;
import oracle.kubernetes.operator.utils.CreateDomainInputs;
import static oracle.kubernetes.operator.utils.KubernetesArtifactUtils.*;
=======
import static oracle.kubernetes.operator.create.KubernetesArtifactUtils.*;
import io.kubernetes.client.models.V1PersistentVolume;
>>>>>>>
import static oracle.kubernetes.operator.utils.KubernetesArtifactUtils.*;
import io.kubernetes.client.models.V1PersistentVolume;
import oracle.kubernetes.operator.utils.CreateDomainInputs; |
<<<<<<<
testClusterScaling(operator, domain);
if (System.getenv("QUICKTEST") == null
|| (System.getenv("QUICKTEST") != null
&& !System.getenv("QUICKTEST").equalsIgnoreCase("true"))) {
testDomainLifecyle(operator, domain);
testOperatorLifecycle(operator, domain);
}
=======
testDomainLifecyle(operator, domain);
testOperatorLifecycle(operator, domain);
testClusterScaling(operator, domain);
>>>>>>>
testClusterScaling(operator, domain);
testDomainLifecyle(operator, domain);
testOperatorLifecycle(operator, domain); |
<<<<<<<
data.put("startServer.sh", START_SERVER_SHELL_SCRIPT);
data.put("start-server.py", START_SERVER_PYTHON_SCRIPT);
data.put("stopServer.sh", STOP_SERVER_SHELL_SCRIPT);
data.put("stop-server.py", STOP_SERVER_PYTHON_SCRIPT);
=======
data.put("readState.sh",
"#!/bin/bash\n" +
"\n" +
"# Reads the current state of a server. The script checks a WebLogic Server state\n" +
"# file which is updated by the node manager.\n" +
"\n" +
"DN=${DOMAIN_NAME:-$1}\n" +
"SN=${SERVER_NAME:-$2}\n" +
"STATEFILE=/shared/domain/${DN}/servers/${SN}/data/nodemanager/${SN}.state\n" +
"\n" +
"if [ `jps -l | grep -c \" weblogic.NodeManager\"` -eq 0 ]; then\n" +
" echo \"Error: WebLogic NodeManager process not found.\"\n" +
" exit 1\n" +
"fi\n" +
"\n" +
"if [ ! -f ${STATEFILE} ]; then\n" +
" echo \"Error: Server state file not found.\"\n" +
" exit 1\n" +
"fi\n" +
"\n" +
"cat ${STATEFILE}\n" +
"exit 0");
>>>>>>>
data.put("startServer.sh", START_SERVER_SHELL_SCRIPT);
data.put("start-server.py", START_SERVER_PYTHON_SCRIPT);
data.put("stopServer.sh", STOP_SERVER_SHELL_SCRIPT);
data.put("stop-server.py", STOP_SERVER_PYTHON_SCRIPT);
data.put("readState.sh",
"#!/bin/bash\n" +
"\n" +
"# Reads the current state of a server. The script checks a WebLogic Server state\n" +
"# file which is updated by the node manager.\n" +
"\n" +
"DN=${DOMAIN_NAME:-$1}\n" +
"SN=${SERVER_NAME:-$2}\n" +
"STATEFILE=/shared/domain/${DN}/servers/${SN}/data/nodemanager/${SN}.state\n" +
"\n" +
"if [ `jps -l | grep -c \" weblogic.NodeManager\"` -eq 0 ]; then\n" +
" echo \"Error: WebLogic NodeManager process not found.\"\n" +
" exit 1\n" +
"fi\n" +
"\n" +
"if [ ! -f ${STATEFILE} ]; then\n" +
" echo \"Error: Server state file not found.\"\n" +
" exit 1\n" +
"fi\n" +
"\n" +
"cat ${STATEFILE}\n" +
"exit 0"); |
<<<<<<<
String serviceURL =
HttpClient.getServiceURL(
service,
pod,
serverConfig.getAdminProtocolChannelName(),
serverConfig.getListenPort());
if (serviceURL != null) {
String jsonResult =
httpClient
.executePostUrlOnServiceClusterIP(
getRetrieveHealthSearchUrl(),
serviceURL,
getRetrieveHealthSearchPayload(),
true)
.getResponse();
ObjectMapper mapper = new ObjectMapper();
JsonNode root = mapper.readTree(jsonResult);
JsonNode state = null;
JsonNode subsystemName = null;
JsonNode symptoms = null;
JsonNode overallHealthState = root.path("overallHealthState");
if (overallHealthState != null) {
state = overallHealthState.path("state");
subsystemName = overallHealthState.path("subsystemName");
symptoms = overallHealthState.path("symptoms");
=======
if (httpClient == null) {
LOGGER.info(
(LoggingFilter) packet.get(LoggingFilter.LOGGING_FILTER_PACKET_KEY),
MessageKeys.WLS_HEALTH_READ_FAILED_NO_HTTPCLIENT,
packet.get(ProcessingConstants.SERVER_NAME));
} else {
String serviceURL = HttpClient.getServiceURL(service);
if (serviceURL != null) {
String jsonResult =
httpClient
.executePostUrlOnServiceClusterIP(
getRetrieveHealthSearchUrl(),
serviceURL,
getRetrieveHealthSearchPayload(),
true)
.getResponse();
ServerHealth health = parseServerHealthJson(jsonResult);
@SuppressWarnings("unchecked")
ConcurrentMap<String, ServerHealth> serverHealthMap =
(ConcurrentMap<String, ServerHealth>)
packet.get(ProcessingConstants.SERVER_HEALTH_MAP);
serverHealthMap.put((String) packet.get(ProcessingConstants.SERVER_NAME), health);
packet.put(ProcessingConstants.SERVER_HEALTH_READ, Boolean.TRUE);
>>>>>>>
if (httpClient == null) {
LOGGER.info(
(LoggingFilter) packet.get(LoggingFilter.LOGGING_FILTER_PACKET_KEY),
MessageKeys.WLS_HEALTH_READ_FAILED_NO_HTTPCLIENT,
packet.get(ProcessingConstants.SERVER_NAME));
} else {
String serviceURL =
HttpClient.getServiceURL(
service,
pod,
serverConfig.getAdminProtocolChannelName(),
serverConfig.getListenPort());
if (serviceURL != null) {
String jsonResult =
httpClient
.executePostUrlOnServiceClusterIP(
getRetrieveHealthSearchUrl(),
serviceURL,
getRetrieveHealthSearchPayload(),
true)
.getResponse();
ServerHealth health = parseServerHealthJson(jsonResult);
@SuppressWarnings("unchecked")
ConcurrentMap<String, ServerHealth> serverHealthMap =
(ConcurrentMap<String, ServerHealth>)
packet.get(ProcessingConstants.SERVER_HEALTH_MAP);
serverHealthMap.put((String) packet.get(ProcessingConstants.SERVER_NAME), health);
packet.put(ProcessingConstants.SERVER_HEALTH_READ, Boolean.TRUE);
<<<<<<<
LOGGER.fine(
MessageKeys.WLS_HEALTH_READ_FAILED, packet.get(ProcessingConstants.SERVER_NAME));
LOGGER.fine(MessageKeys.EXCEPTION, t);
=======
LOGGER.info(
(LoggingFilter) packet.get(LoggingFilter.LOGGING_FILTER_PACKET_KEY),
MessageKeys.WLS_HEALTH_READ_FAILED,
packet.get(ProcessingConstants.SERVER_NAME),
t);
>>>>>>>
LOGGER.info(
(LoggingFilter) packet.get(LoggingFilter.LOGGING_FILTER_PACKET_KEY),
MessageKeys.WLS_HEALTH_READ_FAILED,
packet.get(ProcessingConstants.SERVER_NAME),
t); |
<<<<<<<
import oracle.kubernetes.weblogic.domain.v2.ProbeTuning;
=======
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
>>>>>>>
import oracle.kubernetes.weblogic.domain.v2.ProbeTuning;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder; |
<<<<<<<
=======
import io.kubernetes.client.models.V1beta1IngressList;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
>>>>>>>
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
<<<<<<<
deleteServices(),
deletePods(),
=======
serverDownStep,
deleteIngresses(),
>>>>>>>
serverDownStep, |
<<<<<<<
=======
import oracle.kubernetes.operator.helpers.IngressHelper;
import oracle.kubernetes.operator.helpers.Scan;
import oracle.kubernetes.operator.helpers.ScanCache;
>>>>>>>
import oracle.kubernetes.operator.helpers.Scan;
import oracle.kubernetes.operator.helpers.ScanCache; |
<<<<<<<
import java.util.HashMap;
=======
import java.util.Arrays;
>>>>>>>
import java.util.Arrays;
<<<<<<<
WlsDomainConfig wlsDomainConfig = new WlsDomainConfig(null);
DomainSpec domainSpec = new DomainSpec().addClusterStartupItem(new ClusterStartup().clusterName("noSuchCluster"));
=======
WlsDomainConfig wlsDomainConfig = new WlsDomainConfig();
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("noSuchCluster")));
>>>>>>>
WlsDomainConfig wlsDomainConfig = new WlsDomainConfig(null);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("noSuchCluster")));
<<<<<<<
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_1_CLUSTER);
DomainSpec domainSpec = new DomainSpec().addClusterStartupItem(new ClusterStartup().clusterName("DockerCluster")).replicas(10);
=======
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create().load(JSON_STRING_1_CLUSTER);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(10);
>>>>>>>
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_1_CLUSTER);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(10);
<<<<<<<
WlsDomainConfig wlsDomainConfig = new WlsDomainConfig(null);
DomainSpec domainSpec = new DomainSpec().addClusterStartupItem(new ClusterStartup().clusterName("DockerCluster")).replicas(10);
=======
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create();
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(10);
>>>>>>>
WlsDomainConfig wlsDomainConfig = new WlsDomainConfig(null);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(10);
<<<<<<<
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().addClusterStartupItem(new ClusterStartup().clusterName("DockerCluster")).replicas(10);
=======
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create().load(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(10);
>>>>>>>
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(10);
<<<<<<<
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().addClusterStartupItem(new ClusterStartup().clusterName("DockerCluster")).replicas(10);
=======
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create().load(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(10);
>>>>>>>
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(10);
<<<<<<<
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_1_CLUSTER);
DomainSpec domainSpec = new DomainSpec().addClusterStartupItem(new ClusterStartup().clusterName("DockerCluster")).replicas(5);
=======
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create().load(JSON_STRING_1_CLUSTER);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(5);
>>>>>>>
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_1_CLUSTER);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster"))).withReplicas(5);
<<<<<<<
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().addClusterStartupItem(new ClusterStartup().clusterName("DockerCluster2").replicas(3)).replicas(5);
=======
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create().load(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster2").withReplicas(3))).withReplicas(5);
>>>>>>>
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster2").withReplicas(3))).withReplicas(5);
<<<<<<<
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
ClusterStartup dockerCluster = new ClusterStartup().clusterName("DockerCluster").replicas(10);
ClusterStartup dockerCluster2 = new ClusterStartup().clusterName("DockerCluster2").replicas(10);
DomainSpec domainSpec = new DomainSpec().addClusterStartupItem(dockerCluster).addClusterStartupItem(dockerCluster2);
=======
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create().load(JSON_STRING_2_CLUSTERS);
ClusterStartup dockerCluster = new ClusterStartup().withClusterName("DockerCluster").withReplicas(10);
ClusterStartup dockerCluster2 = new ClusterStartup().withClusterName("DockerCluster2").withReplicas(10);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(dockerCluster, dockerCluster2));
>>>>>>>
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
ClusterStartup dockerCluster = new ClusterStartup().withClusterName("DockerCluster").withReplicas(10);
ClusterStartup dockerCluster2 = new ClusterStartup().withClusterName("DockerCluster2").withReplicas(10);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(dockerCluster, dockerCluster2));
<<<<<<<
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().addClusterStartupItem(new ClusterStartup().clusterName("DockerCluster2").replicas(2)).replicas(5);
=======
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create().load(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster2").withReplicas(2))).withReplicas(5);
>>>>>>>
WlsDomainConfig wlsDomainConfig = WlsDomainConfig.create(JSON_STRING_2_CLUSTERS);
DomainSpec domainSpec = new DomainSpec().withClusterStartup(Arrays.asList(new ClusterStartup().withClusterName("DockerCluster2").withReplicas(2))).withReplicas(5); |
<<<<<<<
private static final String DOMAIN_HOME = "/shared/domain/domain1";
private static final String LOG_HOME = "/shared/logs/" + UID;
=======
private static final String DOMAIN_HOME = "/shared/domains/uid1";
private static final String LOG_HOME = "/shared/logs";
>>>>>>>
private static final String DOMAIN_HOME = "/shared/domains/uid1";
private static final String LOG_HOME = "/shared/logs";
<<<<<<<
hasEnvVar("LOG_HOME", LOG_HOME),
hasEnvVar("SERVER_OUT_IN_POD_LOG", INCLUDE_SERVER_OUT_IN_POD_LOG),
=======
hasEnvVar("LOG_HOME", LOG_HOME + "/" + UID),
>>>>>>>
hasEnvVar("SERVER_OUT_IN_POD_LOG", INCLUDE_SERVER_OUT_IN_POD_LOG),
hasEnvVar("LOG_HOME", LOG_HOME + "/" + UID),
<<<<<<<
.addEnvItem(envItem("LOG_HOME", LOG_HOME))
.addEnvItem(envItem("SERVER_OUT_IN_POD_LOG", INCLUDE_SERVER_OUT_IN_POD_LOG))
=======
.addEnvItem(envItem("LOG_HOME", LOG_HOME + "/" + UID))
>>>>>>>
.addEnvItem(envItem("SERVER_OUT_IN_POD_LOG", INCLUDE_SERVER_OUT_IN_POD_LOG))
.addEnvItem(envItem("LOG_HOME", LOG_HOME + "/" + UID)) |
<<<<<<<
* The in-pod name of the directory to store the domain, node manager, server logs, and server
* .out files in.
*/
@SerializedName("logHome")
@Expose
private String logHome;
/** Whether to include the server .out file to the pod's stdout. Default is true. */
@SerializedName("includeServerOutInPodLog")
@Expose
private String includeServerOutInPodLog;
/**
=======
* The WebLogic Docker image.
*
* <p>Defaults to store/oracle/weblogic:12.2.1.3.
*/
@JsonPropertyDescription(
"The Weblogic Docker image; required when domainHomeInImage is true; "
+ "otherwise, defaults to store/oracle/weblogic:12.2.1.3")
@SerializedName("image")
@Expose
private String image;
/**
* The image pull policy for the WebLogic Docker image. Legal values are Always, Never and
* IfNotPresent.
*
* <p>Defaults to Always if image ends in :latest, IfNotPresent otherwise.
*
* <p>More info: https://kubernetes.io/docs/concepts/containers/images#updating-images
*/
@JsonPropertyDescription(
"The image pull policy for the WebLogic Docker image. "
+ ""
+ "Legal values are Always, Never and IfNotPresent. "
+ "Defaults to Always if image ends in :latest, IfNotPresent otherwise")
@SerializedName("imagePullPolicy")
@Expose
private String imagePullPolicy;
/**
* The image pull secret for the WebLogic Docker image.
*
* @deprecated as 2.0, use #imagePullSecrets
*/
@SuppressWarnings({"unused", "DeprecatedIsStillUsed"})
@Deprecated
@SerializedName("imagePullSecret")
@Expose
private V1LocalObjectReference imagePullSecret;
/**
* The image pull secrets for the WebLogic Docker image.
*
* <p>More info:
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.10/#localobjectreference-v1-core
*
* @since 2.0
*/
@JsonPropertyDescription("A list of image pull secrets for the WebLogic Docker image.")
@SerializedName("imagePullSecrets")
@Expose
private List<V1LocalObjectReference> imagePullSecrets;
/**
>>>>>>>
* The in-pod name of the directory to store the domain, node manager, server logs, and server
* .out files in.
*/
@SerializedName("logHome")
@Expose
private String logHome;
/** Whether to include the server .out file to the pod's stdout. Default is true. */
@SerializedName("includeServerOutInPodLog")
@Expose
private String includeServerOutInPodLog;
/**
* The WebLogic Docker image.
*
* <p>Defaults to store/oracle/weblogic:12.2.1.3.
*/
@JsonPropertyDescription(
"The Weblogic Docker image; required when domainHomeInImage is true; "
+ "otherwise, defaults to store/oracle/weblogic:12.2.1.3")
@SerializedName("image")
@Expose
private String image;
/**
* The image pull policy for the WebLogic Docker image. Legal values are Always, Never and
* IfNotPresent.
*
* <p>Defaults to Always if image ends in :latest, IfNotPresent otherwise.
*
* <p>More info: https://kubernetes.io/docs/concepts/containers/images#updating-images
*/
@JsonPropertyDescription(
"The image pull policy for the WebLogic Docker image. "
+ ""
+ "Legal values are Always, Never and IfNotPresent. "
+ "Defaults to Always if image ends in :latest, IfNotPresent otherwise")
@SerializedName("imagePullPolicy")
@Expose
private String imagePullPolicy;
/**
* The image pull secret for the WebLogic Docker image.
*
* @deprecated as 2.0, use #imagePullSecrets
*/
@SuppressWarnings({"unused", "DeprecatedIsStillUsed"})
@Deprecated
@SerializedName("imagePullSecret")
@Expose
private V1LocalObjectReference imagePullSecret;
/**
* The image pull secrets for the WebLogic Docker image.
*
* <p>More info:
* https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.10/#localobjectreference-v1-core
*
* @since 2.0
*/
@JsonPropertyDescription("A list of image pull secrets for the WebLogic Docker image.")
@SerializedName("imagePullSecrets")
@Expose
private List<V1LocalObjectReference> imagePullSecrets;
/**
<<<<<<<
return new ToStringBuilder(this)
.appendSuper(super.toString())
.append("domainUID", domainUID)
.append("domainName", domainName)
.append("adminSecret", adminSecret)
.append("asName", asName)
.append("asPort", asPort)
.append("logHome", logHome)
.append("includeServerOutInPodLog", includeServerOutInPodLog)
.append("exportT3Channels", exportT3Channels)
.append("startupControl", startupControl)
.append("serverStartup", serverStartup)
.append("clusterStartup", clusterStartup)
.append("replicas", replicas)
.append("storage", storage)
.toString();
=======
ToStringBuilder builder =
new ToStringBuilder(this)
.appendSuper(super.toString())
.append("domainUID", domainUID)
.append("domainName", domainName)
.append("adminSecret", adminSecret)
.append("asName", asName)
.append("asPort", asPort)
.append("image", image)
.append("imagePullPolicy", imagePullPolicy)
.append("storage", storage);
if (hasV2Configuration())
builder
.append("imagePullSecrets", imagePullSecrets)
.append("adminServer", adminServer)
.append("managedServers", managedServers)
.append("clusters", clusters);
else
builder
.append("imagePullSecret", imagePullSecrets)
.append("startupControl", startupControl)
.append("serverStartup", serverStartup)
.append("clusterStartup", clusterStartup)
.append("replicas", replicas)
.append("exportT3Channels", exportT3Channels);
return builder.toString();
>>>>>>>
ToStringBuilder builder =
new ToStringBuilder(this)
.appendSuper(super.toString())
.append("domainUID", domainUID)
.append("domainName", domainName)
.append("adminSecret", adminSecret)
.append("asName", asName)
.append("asPort", asPort)
.append("image", image)
.append("imagePullPolicy", imagePullPolicy)
.append("storage", storage);
if (hasV2Configuration())
builder
.append("imagePullSecrets", imagePullSecrets)
.append("adminServer", adminServer)
.append("managedServers", managedServers)
.append("includeServerOutInPodLog", includeServerOutInPodLog)
.append("clusters", clusters);
else
builder
.append("imagePullSecret", imagePullSecrets)
.append("startupControl", startupControl)
.append("serverStartup", serverStartup)
.append("clusterStartup", clusterStartup)
.append("replicas", replicas)
.append("exportT3Channels", exportT3Channels);
return builder.toString();
<<<<<<<
return new EqualsBuilder()
.appendSuper(super.equals(other))
.append(asName, rhs.asName)
.append(replicas, rhs.replicas)
.append(startupControl, rhs.startupControl)
.append(domainUID, rhs.domainUID)
.append(clusterStartup, rhs.clusterStartup)
.append(asPort, rhs.asPort)
.append(domainName, rhs.domainName)
.append(exportT3Channels, rhs.exportT3Channels)
.append(serverStartup, rhs.serverStartup)
.append(adminSecret, rhs.adminSecret)
.append(storage, rhs.storage)
.append(logHome, rhs.logHome)
.append(includeServerOutInPodLog, rhs.includeServerOutInPodLog)
.isEquals();
=======
EqualsBuilder builder =
new EqualsBuilder()
.appendSuper(super.equals(other))
.append(domainUID, rhs.domainUID)
.append(domainName, rhs.domainName)
.append(adminSecret, rhs.adminSecret)
.append(asName, rhs.asName)
.append(asPort, rhs.asPort)
.append(image, rhs.image)
.append(storage, rhs.storage)
.append(imagePullPolicy, rhs.imagePullPolicy);
if (hasV2Configuration())
builder
.append(imagePullSecrets, rhs.imagePullSecrets)
.append(adminServer, rhs.adminServer)
.append(managedServers, rhs.managedServers)
.append(clusters, rhs.clusters);
else
builder
.append(replicas, rhs.replicas)
.append(startupControl, rhs.startupControl)
.append(clusterStartup, rhs.clusterStartup)
.append(exportT3Channels, rhs.exportT3Channels)
.append(serverStartup, rhs.serverStartup);
return builder.isEquals();
>>>>>>>
EqualsBuilder builder =
new EqualsBuilder()
.appendSuper(super.equals(other))
.append(domainUID, rhs.domainUID)
.append(domainName, rhs.domainName)
.append(adminSecret, rhs.adminSecret)
.append(asName, rhs.asName)
.append(asPort, rhs.asPort)
.append(image, rhs.image)
.append(storage, rhs.storage)
.append(imagePullPolicy, rhs.imagePullPolicy);
if (hasV2Configuration())
builder
.append(imagePullSecrets, rhs.imagePullSecrets)
.append(adminServer, rhs.adminServer)
.append(managedServers, rhs.managedServers)
.append(includeServerOutInPodLog, rhs.includeServerOutInPodLog)
.append(clusters, rhs.clusters);
else
builder
.append(replicas, rhs.replicas)
.append(startupControl, rhs.startupControl)
.append(clusterStartup, rhs.clusterStartup)
.append(exportT3Channels, rhs.exportT3Channels)
.append(serverStartup, rhs.serverStartup);
return builder.isEquals(); |
<<<<<<<
=======
private V1ObjectMeta createMetadata() {
return new V1ObjectMeta()
.name(KubernetesConstants.DOMAIN_CONFIG_MAP_NAME)
.namespace(this.domainNamespace)
.putLabelsItem(LabelConstants.RESOURCE_VERSION_LABEL, DEFAULT_DOMAIN_VERSION)
.putLabelsItem(LabelConstants.OPERATORNAME_LABEL, operatorNamespace)
.putLabelsItem(LabelConstants.CREATEDBYOPERATOR_LABEL, "true");
}
>>>>>>>
private V1ObjectMeta createMetadata() {
return new V1ObjectMeta()
.name(KubernetesConstants.DOMAIN_CONFIG_MAP_NAME)
.namespace(this.domainNamespace)
.putLabelsItem(LabelConstants.RESOURCE_VERSION_LABEL, DEFAULT_DOMAIN_VERSION)
.putLabelsItem(LabelConstants.OPERATORNAME_LABEL, operatorNamespace)
.putLabelsItem(LabelConstants.CREATEDBYOPERATOR_LABEL, "true");
}
<<<<<<<
ResponseStep<V1ConfigMap> createReplaceResponseStep(Step next) {
return new ReplaceResponseStep(next);
=======
private boolean isCompatibleMap(V1ConfigMap existingMap) {
return VersionHelper.matchesResourceVersion(existingMap.getMetadata(), DEFAULT_DOMAIN_VERSION)
&& COMPARATOR.containsAll(existingMap, this.model);
>>>>>>>
ResponseStep<V1ConfigMap> createReplaceResponseStep(Step next) {
return new ReplaceResponseStep(next); |
<<<<<<<
@Override
public ServerConfigurator withRestartVersion(String restartVersion) {
server.setRestartVersion(restartVersion);
return this;
}
=======
@Override
public ServerConfigurator withServiceLabel(String name, String value) {
server.addServiceLabels(name, value);
return this;
}
@Override
public ServerConfigurator withServiceAnnotation(String name, String value) {
server.addServiceAnnotations(name, value);
return this;
}
>>>>>>>
@Override
public ServerConfigurator withServiceLabel(String name, String value) {
server.addServiceLabels(name, value);
return this;
}
@Override
public ServerConfigurator withServiceAnnotation(String name, String value) {
server.addServiceAnnotations(name, value);
return this;
}
@Override
public ServerConfigurator withRestartVersion(String restartVersion) {
server.setRestartVersion(restartVersion);
return this;
}
<<<<<<<
@Override
public ClusterConfigurator withRestartVersion(String restartVersion) {
cluster.setRestartVersion(restartVersion);
return this;
}
=======
@Override
public ClusterConfigurator withServiceLabel(String name, String value) {
cluster.addServiceLabels(name, value);
return this;
}
@Override
public ClusterConfigurator withServiceAnnotation(String name, String value) {
cluster.addServiceAnnotations(name, value);
return this;
}
>>>>>>>
@Override
public ClusterConfigurator withServiceLabel(String name, String value) {
cluster.addServiceLabels(name, value);
return this;
}
@Override
public ClusterConfigurator withServiceAnnotation(String name, String value) {
cluster.addServiceAnnotations(name, value);
return this;
}
@Override
public ClusterConfigurator withRestartVersion(String restartVersion) {
cluster.setRestartVersion(restartVersion);
return this;
} |
<<<<<<<
public void testHeredoc() {
testTokenization("cat <<END\nEND", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_MARKER_END);
testTokenization("cat << END", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << \"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << \"END\"\"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << $\"END\"\"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << $\"END\"$\"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat <<'END'", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START);
testTokenization("cat << 'END'", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << $'END'", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << $'END''END'", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat <<END", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START);
testTokenization("cat <<END\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED);
testTokenization("cat <<END\nABC\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT);
testTokenization("cat <<END\nABC\n\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT);
testTokenization("cat <<END\nABC", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT);
testTokenization("cat <<END\nABC\nDEF\nEND\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED);
testTokenization("cat << END\nABC\nDEF\nEND\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END,LINE_FEED);
testTokenization("cat <<-END\nABC\nDEF\nEND\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END,LINE_FEED);
testTokenization("cat <<- END\nABC\nDEF\nEND\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END,LINE_FEED);
testTokenization("cat <<END\nABC\nDEF\nEND", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
testTokenization("cat <<END\nABC\nDEF\n\n\nXYZ DEF\nEND", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
testTokenization("cat <<!\n!", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_MARKER_END);
testTokenization("{\n" +
"cat <<EOF\n" +
"test\n" +
"EOF\n" +
"}", LEFT_CURLY, LINE_FEED, WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED, RIGHT_CURLY);
testTokenization("cat <<EOF\n" +
"$test\n" +
"EOF", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, VARIABLE, HEREDOC_CONTENT, HEREDOC_MARKER_END);
testTokenization("{\n" +
"cat <<EOF\n" +
"$(test)\n" +
"EOF\n" +
"}", LEFT_CURLY, LINE_FEED, WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, DOLLAR, LEFT_PAREN, WORD, RIGHT_PAREN, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED, RIGHT_CURLY);
testTokenization("cat <<X <<\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, WHITESPACE, HEREDOC_MARKER_TAG, LINE_FEED);
}
@Test
public void testMultilineHeredoc() throws Exception {
//multiple heredocs in one command line
testTokenization("cat <<END <<END2\nABC\nEND\nABC\nEND2\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED);
}
@Test
@Ignore //ignored for now because there is a match-all rule for the heredoc start marker
public void _testHeredocErrors() throws Exception {
testTokenization("cat <<\"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START);
//the closing string marker is missing in the heredoc
testTokenization("cat <<\"END", WORD, WHITESPACE, HEREDOC_MARKER_TAG, BAD_CHARACTER, BAD_CHARACTER);
}
@Test
public void testIssue199() throws Exception {
testTokenization("$( ((count != 1)) && echo)", DOLLAR, LEFT_PAREN, WHITESPACE, EXPR_ARITH, WORD, WHITESPACE, ARITH_NE, WHITESPACE, ARITH_NUMBER, _EXPR_ARITH, WHITESPACE, AND_AND, WHITESPACE, WORD, RIGHT_PAREN);
testTokenization("$(((count != 1)) && echo)", DOLLAR, LEFT_PAREN, EXPR_ARITH, WORD, WHITESPACE, ARITH_NE, WHITESPACE, ARITH_NUMBER, _EXPR_ARITH, WHITESPACE, AND_AND, WHITESPACE, WORD, RIGHT_PAREN);
//limitation of the Bash lexer: no look-ahead to the end of an expression
//Bash parses this (probably) as an arithmetic expression with a parenthesis inside
//BashSupport doesn't
testTokenization("(((1==1)))", LEFT_PAREN, EXPR_ARITH, ARITH_NUMBER, ARITH_EQ, ARITH_NUMBER, _EXPR_ARITH, RIGHT_PAREN);
//the grammar is a bit complicated: how an expression beginning with $((( is parsed depends on how the expression ends
//bash interprets the tokens $(((1+1)+1)) differently than $(((1+1)) && echo)
//the first is an arithmetic expression with a sum computation
//the second is a subshell with an embedded arithmetic command and an echo command
//if an expression starts with three or more parentheses the rule is:
// if the expression ends with a single parenthesis, then the first opening parenthesis opens a subshell
// if the expression ends with two parentheses, then the first two start an arithmetic command
}
@Test
=======
public void testIssue201() throws Exception {
testTokenization("((!foo))", EXPR_ARITH, ARITH_NEGATE, WORD, _EXPR_ARITH);
}
@Test
>>>>>>>
public void testHeredoc() {
testTokenization("cat <<END\nEND", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_MARKER_END);
testTokenization("cat << END", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << \"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << \"END\"\"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << $\"END\"\"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << $\"END\"$\"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat <<'END'", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START);
testTokenization("cat << 'END'", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << $'END'", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat << $'END''END'", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START);
testTokenization("cat <<END", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START);
testTokenization("cat <<END\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED);
testTokenization("cat <<END\nABC\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT);
testTokenization("cat <<END\nABC\n\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT);
testTokenization("cat <<END\nABC", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT);
testTokenization("cat <<END\nABC\nDEF\nEND\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED);
testTokenization("cat << END\nABC\nDEF\nEND\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END,LINE_FEED);
testTokenization("cat <<-END\nABC\nDEF\nEND\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END,LINE_FEED);
testTokenization("cat <<- END\nABC\nDEF\nEND\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, WHITESPACE, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END,LINE_FEED);
testTokenization("cat <<END\nABC\nDEF\nEND", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
testTokenization("cat <<END\nABC\nDEF\n\n\nXYZ DEF\nEND", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END);
testTokenization("cat <<!\n!", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_MARKER_END);
testTokenization("{\n" +
"cat <<EOF\n" +
"test\n" +
"EOF\n" +
"}", LEFT_CURLY, LINE_FEED, WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED, RIGHT_CURLY);
testTokenization("cat <<EOF\n" +
"$test\n" +
"EOF", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, VARIABLE, HEREDOC_CONTENT, HEREDOC_MARKER_END);
testTokenization("{\n" +
"cat <<EOF\n" +
"$(test)\n" +
"EOF\n" +
"}", LEFT_CURLY, LINE_FEED, WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, DOLLAR, LEFT_PAREN, WORD, RIGHT_PAREN, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED, RIGHT_CURLY);
testTokenization("cat <<X <<\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, WHITESPACE, HEREDOC_MARKER_TAG, LINE_FEED);
}
@Test
public void testMultilineHeredoc() throws Exception {
//multiple heredocs in one command line
testTokenization("cat <<END <<END2\nABC\nEND\nABC\nEND2\n", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START, LINE_FEED, HEREDOC_CONTENT, HEREDOC_MARKER_END, HEREDOC_CONTENT, HEREDOC_MARKER_END, LINE_FEED);
}
@Test
@Ignore //ignored for now because there is a match-all rule for the heredoc start marker
public void _testHeredocErrors() throws Exception {
testTokenization("cat <<\"END\"", WORD, WHITESPACE, HEREDOC_MARKER_TAG, HEREDOC_MARKER_START);
//the closing string marker is missing in the heredoc
testTokenization("cat <<\"END", WORD, WHITESPACE, HEREDOC_MARKER_TAG, BAD_CHARACTER, BAD_CHARACTER);
}
@Test
public void testIssue199() throws Exception {
testTokenization("$( ((count != 1)) && echo)", DOLLAR, LEFT_PAREN, WHITESPACE, EXPR_ARITH, WORD, WHITESPACE, ARITH_NE, WHITESPACE, ARITH_NUMBER, _EXPR_ARITH, WHITESPACE, AND_AND, WHITESPACE, WORD, RIGHT_PAREN);
testTokenization("$(((count != 1)) && echo)", DOLLAR, LEFT_PAREN, EXPR_ARITH, WORD, WHITESPACE, ARITH_NE, WHITESPACE, ARITH_NUMBER, _EXPR_ARITH, WHITESPACE, AND_AND, WHITESPACE, WORD, RIGHT_PAREN);
//limitation of the Bash lexer: no look-ahead to the end of an expression
//Bash parses this (probably) as an arithmetic expression with a parenthesis inside
//BashSupport doesn't
testTokenization("(((1==1)))", LEFT_PAREN, EXPR_ARITH, ARITH_NUMBER, ARITH_EQ, ARITH_NUMBER, _EXPR_ARITH, RIGHT_PAREN);
//the grammar is a bit complicated: how an expression beginning with $((( is parsed depends on how the expression ends
//bash interprets the tokens $(((1+1)+1)) differently than $(((1+1)) && echo)
//the first is an arithmetic expression with a sum computation
//the second is a subshell with an embedded arithmetic command and an echo command
//if an expression starts with three or more parentheses the rule is:
// if the expression ends with a single parenthesis, then the first opening parenthesis opens a subshell
// if the expression ends with two parentheses, then the first two start an arithmetic command
}
@Test
public void testIssue201() throws Exception {
testTokenization("((!foo))", EXPR_ARITH, ARITH_NEGATE, WORD, _EXPR_ARITH);
}
@Test |
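The disambiguation rule spelled out in the comments of this record can be captured in a tiny helper. A minimal sketch, assuming the rule exactly as stated there; the method name and the trailing-parenthesis heuristic are illustrative and not part of BashSupport:
// Counts the ')' characters that close the expression: per the rule quoted above,
// "$(((1+1)+1))" ends with two parentheses and starts an arithmetic command, while
// "$(((1+1)) && echo)" ends with one and opens a subshell.
static boolean opensArithmeticCommand(String expr) {
    int trailing = 0;
    for (int i = expr.length() - 1; i >= 0 && expr.charAt(i) == ')'; i--) {
        trailing++;
    }
    return trailing >= 2;
}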
<<<<<<<
@SerializedName("asEnv")
private List<io.kubernetes.client.models.V1EnvVar> asEnv = null;
=======
@SerializedName("asNodePort")
private Integer asNodePort = null;
>>>>>>>
<<<<<<<
public DomainSpec asEnv(List<io.kubernetes.client.models.V1EnvVar> asEnv) {
this.asEnv = asEnv;
return this;
}
public DomainSpec addAsEnvItem(io.kubernetes.client.models.V1EnvVar asEnvItem) {
if (this.asEnv == null) {
this.asEnv = new ArrayList<io.kubernetes.client.models.V1EnvVar>();
}
this.asEnv.add(asEnvItem);
return this;
}
/**
* Environment variables for use in starting the administration server.
*
* @return asEnv
**/
@ApiModelProperty(value = "Environment variables for use in starting the administration server.")
public List<io.kubernetes.client.models.V1EnvVar> getAsEnv() {
return asEnv;
}
public void setAsEnv(List<io.kubernetes.client.models.V1EnvVar> asEnv) {
this.asEnv = asEnv;
}
=======
public DomainSpec asNodePort(Integer asNodePort) {
this.asNodePort = asNodePort;
return this;
}
/**
* Administration server NodePort port. The port on each node on which the
* administration server will be exposed. If specified, this value must be an
* unused port. By default, the administration server will not be exposed
* outside the Kubernetes cluster.
*
* @return asNodePort
**/
@ApiModelProperty(value = "Administration server NodePort port. The port on each node on which the administration server will be exposed. If specified, this value must be an unused port. By default, the administration server will not be exposed outside the Kubernetes cluster.")
public Integer getAsNodePort() {
return asNodePort;
}
public void setAsNodePort(Integer asNodePort) {
this.asNodePort = asNodePort;
}
>>>>>>>
return this;
}
<<<<<<<
&& Objects.equals(this.asEnv, oracleKubernetesWeblogicDomainV1DomainSpec.asEnv)
=======
&& Objects.equals(this.asNodePort, oracleKubernetesWeblogicDomainV1DomainSpec.asNodePort)
>>>>>>>
<<<<<<<
return Objects.hash(domainUID, domainName, image, imagePullPolicy, adminSecret, asName, asPort, asEnv,
=======
return Objects.hash(domainUID, domainName, image, imagePullPolicy, adminSecret, asName, asPort, asNodePort,
>>>>>>>
return Objects.hash(domainUID, domainName, image, imagePullPolicy, adminSecret, asName, asPort,
<<<<<<<
sb.append(" asEnv: ").append(toIndentedString(asEnv)).append("\n");
=======
sb.append(" asNodePort: ").append(toIndentedString(asNodePort)).append("\n");
>>>>>>> |
<<<<<<<
=======
import io.kubernetes.client.models.V1beta1IngressList;
import java.util.concurrent.ScheduledFuture;
>>>>>>>
import java.util.concurrent.ScheduledFuture;
<<<<<<<
=======
private Step removeDomainPresenceInfo() {
return new Step() {
@Override
public NextAction apply(Packet packet) {
DomainPresenceInfoManager.remove(domainUID);
return doNext(packet);
}
};
}
private Step deleteIngresses() {
LOGGER.finer(MessageKeys.LIST_INGRESS_FOR_DOMAIN, this.domainUID, namespace);
return new CallBuilder()
.withLabelSelectors(forDomainUid(domainUID), CREATEDBYOPERATOR_LABEL)
.listIngressAsync(
namespace,
new ActionResponseStep<V1beta1IngressList>() {
@Override
Step createSuccessStep(V1beta1IngressList result, Step next) {
return new DeleteIngressListStep(result.getItems(), next);
}
});
}
static void cancelDomainStatusUpdating(DomainPresenceInfo info) {
ScheduledFuture<?> statusUpdater = info.getStatusUpdater().getAndSet(null);
if (statusUpdater != null) {
statusUpdater.cancel(true);
}
}
>>>>>>>
private Step removeDomainPresenceInfo() {
return new Step() {
@Override
public NextAction apply(Packet packet) {
DomainPresenceInfoManager.remove(domainUID);
return doNext(packet);
}
};
}
static void cancelDomainStatusUpdating(DomainPresenceInfo info) {
ScheduledFuture<?> statusUpdater = info.getStatusUpdater().getAndSet(null);
if (statusUpdater != null) {
statusUpdater.cancel(true);
}
} |
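The resolution above keeps the operator's Step/NextAction pattern. A minimal sketch of that shape, assuming the Step, NextAction and Packet types used in this record; the no-op step below is hypothetical and only illustrates the structure:
private Step noOpStep() {
  return new Step() {
    @Override
    public NextAction apply(Packet packet) {
      // domain-specific work would go here (e.g. updating a presence map, as in the record);
      // returning doNext(packet) hands control to the next step in the fiber
      return doNext(packet);
    }
  };
}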
<<<<<<<
private static String opManagingdefaultAndtest1NSYamlFile =
"OperatorManagingdefaultAndtest1NS.yaml";
private static String opManagingtest2NSYamlFile = "OperatorManagingtest2NS.yaml";
private static final String opForBackwardCompatibility = "OperatorForBackwardCompatibility.yaml";
// file used to customize domain properties for domain, PV and LB inputs yaml
private static String domainOnPVUsingWLSTYamlFile = "DomainOnPVUsingWLST.yaml";
private static String domainOnPVUsingWDTYamlFile = "DomainOnPVUsingWDT.yaml";
private static String domainWithServerStartPolicyAsAdminOnlyYamlFile =
"DomainWithServerStartPolicyAsAdminOnly.yaml";
private static String domainWithStorageReclaimPolicyRecycleYamlFile =
"DomainWithStorageReclaimPolicyRecycle.yaml";
private static String domainWithDefaultValuesForSamplesYamlFile =
"DomainWithDefaultValuesForSamples.yaml";
=======
private static String op1YamlFile = "operator1.yaml";
private static String op2YamlFile = "operator2.yaml";
private static final String opForDelYamlFile1 = "operator_del1.yaml";
private static final String opForDelYamlFile2 = "operator_del2.yaml";
private static final String opForBackwardCompatibility = "operator_bc.yaml";
private static final String opForRESTCertChain = "operator_chain.yaml";
// property file used to customize domain properties for domain inputs yaml
private static String domain1YamlFile = "domain1.yaml";
private static String domain2YamlFile = "domain2.yaml";
private static String domain3YamlFile = "domain3.yaml";
private static String domain4YamlFile = "domain4.yaml";
private static String domain5YamlFile = "domain5.yaml";
private static String domain6YamlFile = "domain6.yaml";
private static String domain7YamlFile = "domain7.yaml";
private static String domain8YamlFile = "domain8.yaml";
private static final String domain1ForDelValueYamlFile = "domain_del_1.yaml";
private static final String domain2ForDelValueYamlFile = "domain_del_2.yaml";
private static final String domain3ForDelValueYamlFile = "domain_del_3.yaml";
private static String domain9YamlFile = "domain9.yaml";
private static String domain10YamlFile = "domain10.yaml";
private static String domain11YamlFile = "domain11.yaml";
private static String domain12YamlFile = "domain12.yaml";
>>>>>>>
private static String opManagingdefaultAndtest1NSYamlFile =
"OperatorManagingdefaultAndtest1NS.yaml";
private static String opManagingtest2NSYamlFile = "OperatorManagingtest2NS.yaml";
private static final String opForBackwardCompatibility = "OperatorForBackwardCompatibility.yaml";
private static final String opForRESTCertChain = "operator_chain.yaml";
// file used to customize domain properties for domain, PV and LB inputs yaml
private static String domainOnPVUsingWLSTYamlFile = "DomainOnPVUsingWLST.yaml";
private static String domainOnPVUsingWDTYamlFile = "DomainOnPVUsingWDT.yaml";
private static String domainWithServerStartPolicyAsAdminOnlyYamlFile =
"DomainWithServerStartPolicyAsAdminOnly.yaml";
private static String domainWithStorageReclaimPolicyRecycleYamlFile =
"DomainWithStorageReclaimPolicyRecycle.yaml";
private static String domainWithDefaultValuesForSamplesYamlFile =
"DomainWithDefaultValuesForSamples.yaml";
<<<<<<<
=======
domain8.createDomainOnExistingDirectory();
logger.info("SUCCESS - test8CreateDomainOnExistingDir");
}
/**
* Create operator and create domain with APACHE load balancer, verify the domain is started
* successfully, and access the admin console via the LB port. Shut down the domain.
*
* @throws Exception
*/
// //@DisabledTest
public void testACreateDomainApacheLB() throws Exception {
Assume.assumeFalse(QUICKTEST);
logTestBegin("testACreateDomainApacheLB");
logger.info("Creating Domain domain9 & verifing the domain creation");
if (operator1 == null) {
operator1 = TestUtils.createOperator(op1YamlFile);
}
boolean testCompletedSuccessfully = false;
// create domain9
Domain domain9 = null;
try {
domain9 = testDomainCreation(domain9YamlFile);
domain9.verifyDomainCreated();
domain9.verifyAdminConsoleViaLB();
testCompletedSuccessfully = true;
} finally {
if (domain9 != null && (JENKINS || testCompletedSuccessfully)) domain9.destroy();
}
logger.info("SUCCESS - testACreateDomainApacheLB");
}
>>>>>>>
<<<<<<<
logger.info("SUCCESS - " + testMethodName);
=======
logger.info("SUCCESS - testRESTIdentityBackwardCompatibility");
}
/**
* Create operator and enable external rest endpoint using a certificate chain. This test uses the
* operator backward compatibility operator because that operator is destroyed.
*
* @throws Exception
*/
@Test
public void testOperatorRESTUsingCertificateChain() throws Exception {
Assume.assumeFalse(QUICKTEST);
logTestBegin("testOperatorRESTUsingCertificateChain");
logger.info("Checking if operatorForBackwardCompatibility is running, if not creating");
if (operatorForRESTCertChain == null) {
operatorForRESTCertChain = TestUtils.createOperator(opForRESTCertChain, RESTCertType.CHAIN);
}
operatorForRESTCertChain.verifyOperatorExternalRESTEndpoint();
logger.info("Operator using legacy REST identity created successfully");
logger.info("SUCCESS - testOperatorRESTUsingCertificateChain");
}
/**
* Create Operator and create a domain with a junk value for the t3 channel public address, then
* replace it with a valid public address via a custom situational config override using a secret.
* Verify the domain is started successfully and that a web application can be deployed and accessed.
*
* @throws Exception
*/
@Test
public void testCustomSitConfigOverrides() throws Exception {
Assume.assumeFalse(QUICKTEST);
logTestBegin("testCustomSitConfigOverrides");
if (operator1 == null) {
operator1 = TestUtils.createOperator(op1YamlFile);
}
Domain domain12 = null;
boolean testCompletedSuccessfully = false;
String createDomainScriptDir =
BaseTest.getProjectRoot() + "/integration-tests/src/test/resources/domain-home-on-pv";
try {
// cp py
Files.copy(
new File(createDomainScriptDir + "/create-domain.py").toPath(),
new File(createDomainScriptDir + "/create-domain.py.bak").toPath(),
StandardCopyOption.REPLACE_EXISTING);
Files.copy(
new File(createDomainScriptDir + "/create-domain-custom-sit-config.py").toPath(),
new File(createDomainScriptDir + "/create-domain.py").toPath(),
StandardCopyOption.REPLACE_EXISTING);
domain12 = testDomainCreation(domain12YamlFile);
domain12.verifyDomainCreated();
testBasicUseCases(domain12);
testAdminT3ChannelWithJMS(domain12);
// testAdvancedUseCasesForADomain(operator1, domain11);
testCompletedSuccessfully = true;
} finally {
Files.copy(
new File(createDomainScriptDir + "/create-domain.py.bak").toPath(),
new File(createDomainScriptDir + "/create-domain.py").toPath(),
StandardCopyOption.REPLACE_EXISTING);
if (domain12 != null && (JENKINS || testCompletedSuccessfully)) {
domain12.destroy();
}
}
logger.info("SUCCESS - testCustomSitConfigOverrides");
>>>>>>>
logger.info("SUCCESS - " + testMethodName);
}
/**
* Create operator and enable external rest endpoint using a certificate chain. This test uses the
* operator backward compatibility operator because that operator is destroyed.
*
* @throws Exception
*/
@Test
public void testOperatorRESTUsingCertificateChain() throws Exception {
Assume.assumeFalse(QUICKTEST);
logTestBegin("testOperatorRESTUsingCertificateChain");
logger.info("Checking if operatorForBackwardCompatibility is running, if not creating");
if (operatorForRESTCertChain == null) {
operatorForRESTCertChain = TestUtils.createOperator(opForRESTCertChain, RESTCertType.CHAIN);
}
operatorForRESTCertChain.verifyOperatorExternalRESTEndpoint();
logger.info("Operator using legacy REST identity created successfully");
logger.info("SUCCESS - testOperatorRESTUsingCertificateChain"); |
<<<<<<<
=======
/**
* The default desired state of servers.
*
* @return server defaults
*/
public Server getServerDefaults() {
return serverDefaults;
}
/**
* The default desired state of servers.
*
* @param serverDefaults server defaults
*/
public void setServerDefaults(Server serverDefaults) {
this.serverDefaults = serverDefaults;
}
/**
* The default desired state of servers.
*
* @param serverDefaults server defaults
* @return this
*/
public DomainSpec withServerDefaults(Server serverDefaults) {
this.serverDefaults = serverDefaults;
return this;
}
/**
* The default desired state of non-clustered servers.
*
* @return server defaults
*/
public NonClusteredServer getNonClusteredServerDefaults() {
return nonClusteredServerDefaults;
}
/**
* The default desired state of non-clustered servers.
*
* @param nonClusteredServerDefaults non-clustered server defaults
*/
public void setNonClusteredServerDefaults(NonClusteredServer nonClusteredServerDefaults) {
this.nonClusteredServerDefaults = nonClusteredServerDefaults;
}
/**
* The default desired state of non-clustered servers.
*
* @param nonClusteredServerDefaults non-clustered server defaults
* @return this
*/
public DomainSpec withNonClusteredServerDefaults(NonClusteredServer nonClusteredServerDefaults) {
this.nonClusteredServerDefaults = nonClusteredServerDefaults;
return this;
}
/**
* Maps the name of a non-clustered server to its desired state.
*
* <p>The server property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the server in nonClusteredServers property, and the property has
* been specified on that server, then use its value.
* <li>If not, and the property has been specified on the nonClusteredServerDefaults property,
* then use its value.
* <li>If not, and the property value has been specified on the serverDefaults property, then
* use its value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @return servers
*/
public Map<String, NonClusteredServer> getServers() {
return this.servers;
}
/**
* Maps the name of a non-clustered server to its desired state.
*
* <p>The server property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the server in nonClusteredServers property, and the property has
* been specified on that server, then use its value.
* <li>If not, and the property has been specified on the nonClusteredServerDefaults property,
* then use its value.
* <li>If not, and the property value has been specified on the serverDefaults property, then
* use its value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @param servers servers
*/
public void setServers(Map<String, NonClusteredServer> servers) {
this.servers = servers;
}
/**
* Maps the name of a non-clustered server to its desired state.
*
* <p>The server property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the server in nonClusteredServers property, and the property has
* been specified on that server, then use its value.
* <li>If not, and the property has been specified on the nonClusteredServerDefaults property,
* then use its value.
* <li>If not, and the property value has been specified on the serverDefaults property, then
* use its value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @param servers servers
* @return this
*/
public DomainSpec withServers(Map<String, NonClusteredServer> servers) {
this.servers = servers;
return this;
}
/**
* Maps the name of a non-clustered server to its desired state.
*
* <p>The server property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the server in nonClusteredServers property, and the property has
* been specified on that server, then use its value.
* <li>If not, and the property has been specified on the nonClusteredServerDefaults property,
* then use its value.
* <li>If not, and the property value has been specified on the serverDefaults property, then
* use its value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @param name server name
* @param server server
*/
public void setServer(String name, NonClusteredServer server) {
this.servers.put(name, server);
}
/**
* Maps the name of a non-clustered server to its desired state.
*
* <p>The server property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the server in nonClusteredServers property, and the property has
* been specified on that server, then use its value.
* <li>If not, and the property has been specified on the nonClusteredServerDefaults property,
* then use its value.
* <li>If not, and the property value has been specified on the serverDefaults property, then
* use its value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @param name name
* @param server server
* @return this
*/
public DomainSpec withServer(String name, NonClusteredServer server) {
this.servers.put(name, server);
return this;
}
/**
* The default desired state of clusters.
*
* @return cluster defaults
*/
public ClusterParams getClusterDefaults() {
return clusterDefaults;
}
/**
* The default desired state of clusters.
*
* @param clusterDefaults cluster defaults
*/
public void setClusterDefaults(ClusterParams clusterDefaults) {
this.clusterDefaults = clusterDefaults;
}
/**
* The default desired state of clusters.
*
* @param clusterDefaults cluster defaults
* @return this
*/
public DomainSpec withClusterDefaults(ClusterParams clusterDefaults) {
this.clusterDefaults = clusterDefaults;
return this;
}
/**
* Maps the name of a cluster to its desired state.
*
* <p>The cluster property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the cluster in the clusters property, and the property has been
* specified on that cluster, then use its value.
* <li>If not, and the property has been specified on the clusterDefaults property, then use its
* value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @return clusters
*/
public Map<String, Cluster> getClusters() {
return this.clusters;
}
/**
* Maps the name of a cluster to its desired state.
*
* <p>The cluster property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the cluster in the clusters property, and the property has been
* specified on that cluster, then use its value.
* <li>If not, and the property has been specified on the clusterDefaults property, then use its
* value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @param clusters clusters
*/
public void setClusters(Map<String, Cluster> clusters) {
this.clusters = clusters;
}
/**
* Maps the name of a cluster to its desired state.
*
* <p>The cluster property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the cluster in the clusters property, and the property has been
* specified on that cluster, then use its value.
* <li>If not, and the property has been specified on the clusterDefaults property, then use its
* value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @param clusters clusters
* @return this
*/
public DomainSpec withClusters(Map<String, Cluster> clusters) {
this.clusters = clusters;
return this;
}
/**
* Maps the name of a cluster to its desired state.
*
* <p>The cluster property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the cluster in the clusters property, and the property has been
* specified on that cluster, then use its value.
* <li>If not, and the property has been specified on the clusterDefaults property, then use its
* value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @param name cluster name
* @param cluster cluster
*/
public void setCluster(String name, Cluster cluster) {
this.clusters.put(name, cluster);
}
/**
* Maps the name of a cluster to its desired state.
*
* <p>The cluster property values use the following defaulting rules:
*
* <ol>
* <li>If there is an entry for the cluster in the clusters property, and the property has been
* specified on that cluster, then use its value.
* <li>If not, and the property has been specified on the clusterDefaults property, then use its
* value.
* <li>If not, then use the default value for the property.
* </ol>
*
* @param name name
* @param cluster cluster
* @return this
*/
public DomainSpec withCluster(String name, Cluster cluster) {
this.clusters.put(name, cluster);
return this;
}
>>>>>>> |
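The defaulting rules repeated in the javadoc above reduce to a four-level lookup. A minimal sketch, assuming the DomainSpec, Server and NonClusteredServer types from this record; the helper name and the Function-based extractors are illustrative, not part of the source:
static <T> T resolveNonClusteredServerProperty(
    DomainSpec spec,
    String serverName,
    java.util.function.Function<NonClusteredServer, T> fromNonClusteredServer,
    java.util.function.Function<Server, T> fromServerDefaults,
    T builtInDefault) {
  NonClusteredServer server = spec.getServers().get(serverName);          // rule 1: per-server entry
  if (server != null && fromNonClusteredServer.apply(server) != null) {
    return fromNonClusteredServer.apply(server);
  }
  NonClusteredServer ncDefaults = spec.getNonClusteredServerDefaults();   // rule 2: non-clustered defaults
  if (ncDefaults != null && fromNonClusteredServer.apply(ncDefaults) != null) {
    return fromNonClusteredServer.apply(ncDefaults);
  }
  Server serverDefaults = spec.getServerDefaults();                       // rule 3: server defaults
  if (serverDefaults != null && fromServerDefaults.apply(serverDefaults) != null) {
    return fromServerDefaults.apply(serverDefaults);
  }
  return builtInDefault;                                                  // rule 4: built-in default
}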
<<<<<<<
=======
testSupport
.createCannedResponse("listService")
.withNamespace(NS)
.withLabelSelectors(forDomainUid(UID), CREATEDBYOPERATOR_LABEL)
.returning(services);
testSupport
.createCannedResponse("deleteCollection")
.withNamespace(NS)
.withLabelSelectors(forDomainUid(UID), CREATEDBYOPERATOR_LABEL)
.returning(new V1Status());
testSupport
.createCannedResponse("listIngress")
.withNamespace(NS)
.withLabelSelectors(forDomainUid(UID), CREATEDBYOPERATOR_LABEL)
.returning(ingresses);
testSupport
.createCannedResponse("deleteIngress")
.withNamespace(NS)
.withName("TEST-cluster1")
.ignoringBody()
.returning(new V1Status());
testSupport
.createCannedResponse("deleteIngress")
.withNamespace(NS)
.withName("TEST-cluster2")
.ignoringBody()
.returning(new V1Status());
>>>>>>>
testSupport
.createCannedResponse("listService")
.withNamespace(NS)
.withLabelSelectors(forDomainUid(UID), CREATEDBYOPERATOR_LABEL)
.returning(services);
testSupport
.createCannedResponse("deleteCollection")
.withNamespace(NS)
.withLabelSelectors(forDomainUid(UID), CREATEDBYOPERATOR_LABEL)
.returning(new V1Status()); |
<<<<<<<
domainMap.put(
"createDomainPyScript",
"integration-tests/src/test/resources/domain-home-on-pv/create-domain-auto-custom-sit-config.py");
=======
domainMap.put("voyagerWebPort", new Integer("30312"));
>>>>>>>
domainMap.put(
"createDomainPyScript",
"integration-tests/src/test/resources/domain-home-on-pv/create-domain-auto-custom-sit-config.py");
domainMap.put("voyagerWebPort", new Integer("30312")); |
<<<<<<<
=======
import static oracle.kubernetes.operator.create.CreateOperatorInputs.readDefaultInputsFile;
import static oracle.kubernetes.operator.create.ExecCreateOperator.execCreateOperator;
import static oracle.kubernetes.operator.create.ExecResultMatcher.succeedsAndPrints;
import static org.hamcrest.MatcherAssert.assertThat;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.*;
>>>>>>>
import static oracle.kubernetes.operator.create.CreateOperatorInputs.readDefaultInputsFile;
import static oracle.kubernetes.operator.create.ExecCreateOperator.execCreateOperator;
import static oracle.kubernetes.operator.create.ExecResultMatcher.succeedsAndPrints;
import static org.hamcrest.MatcherAssert.assertThat;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.*;
<<<<<<<
@Ignore
public void testDefaultNamespace() throws Exception {
ContainerResolver.getInstance().getContainer().getComponents().put(
ProcessingConstants.MAIN_COMPONENT_NAME,
Component.createFor(new CallBuilderFactory()));
=======
public void testAccountNoPrivs() throws Exception {
Assume.assumeTrue(TestUtils.isKubernetesAvailable());
>>>>>>>
public void testAccountNoPrivs() throws Exception {
Assume.assumeTrue(TestUtils.isKubernetesAvailable());
<<<<<<<
Component.createFor(new CallBuilderFactory()));
=======
Component.createFor(
ClientFactory.class, new ClientFactory() {
@Override
public ApiClient get() {
try {
//return ClientBuilder.standard().setAuthentication(
// new AccessTokenAuthentication(token)).build();
ApiClient client = ClientBuilder.standard().build();
client.addDefaultHeader("Impersonate-User", "system:serviceaccount:unit-test-namespace:alice");
return client;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
},
new CallBuilderFactory(null)));
ClientPool.getInstance().drain();
>>>>>>>
Component.createFor(
ClientFactory.class, new ClientFactory() {
@Override
public ApiClient get() {
try {
//return ClientBuilder.standard().setAuthentication(
// new AccessTokenAuthentication(token)).build();
ApiClient client = ClientBuilder.standard().build();
client.addDefaultHeader("Impersonate-User", "system:serviceaccount:unit-test-namespace:alice");
return client;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
},
new CallBuilderFactory()));
ClientPool.getInstance().drain();
<<<<<<<
Component.createFor(new CallBuilderFactory()));
=======
Component.createFor(
ClientFactory.class, new ClientFactory() {
@Override
public ApiClient get() {
try {
//return ClientBuilder.standard().setAuthentication(
// new AccessTokenAuthentication(token)).build();
ApiClient client = ClientBuilder.standard().build();
client.addDefaultHeader("Impersonate-User", "system:serviceaccount:unit-test-namespace:theo");
return client;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
},
new CallBuilderFactory(null)));
ClientPool.getInstance().drain();
>>>>>>>
Component.createFor(
ClientFactory.class, new ClientFactory() {
@Override
public ApiClient get() {
try {
//return ClientBuilder.standard().setAuthentication(
// new AccessTokenAuthentication(token)).build();
ApiClient client = ClientBuilder.standard().build();
client.addDefaultHeader("Impersonate-User", "system:serviceaccount:unit-test-namespace:theo");
return client;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
},
new CallBuilderFactory()));
ClientPool.getInstance().drain(); |
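The resolved tests above impersonate service accounts by adding a default header to the client. A small helper distilled from that pattern, assuming the same io.kubernetes.client ApiClient/ClientBuilder types used in the record; the method name is hypothetical:
static ApiClient impersonatingServiceAccount(String namespace, String serviceAccount)
    throws IOException {
  ApiClient client = ClientBuilder.standard().build();
  // the API server honors the Impersonate-User header when the caller has impersonation
  // privileges, which is what the RBAC tests above rely on
  client.addDefaultHeader("Impersonate-User",
      "system:serviceaccount:" + namespace + ":" + serviceAccount);
  return client;
}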
<<<<<<<
container.addCommandItem("/shared/domain/" + weblogicDomainName + "/nodemgr_home/startServer.sh");
container.addCommandItem(weblogicDomainUID);
container.addCommandItem(spec.getAsName());
=======
V1VolumeMount volumeMountScripts = new V1VolumeMount();
volumeMountScripts.setName("scripts");
volumeMountScripts.setMountPath("/weblogic-operator/scripts");
volumeMountScripts.setReadOnly(true);
container.addVolumeMountsItem(volumeMountScripts);
container.addCommandItem("/shared/domain/" + weblogicDomainName + "/servers/" + spec.getAsName() + "/nodemgr_home/startServer.sh");
>>>>>>>
V1VolumeMount volumeMountScripts = new V1VolumeMount();
volumeMountScripts.setName("scripts");
volumeMountScripts.setMountPath("/weblogic-operator/scripts");
volumeMountScripts.setReadOnly(true);
container.addVolumeMountsItem(volumeMountScripts);
container.addCommandItem("/shared/domain/" + weblogicDomainName + "/nodemgr_home/startServer.sh");
container.addCommandItem(weblogicDomainUID);
container.addCommandItem(spec.getAsName());
<<<<<<<
V1ExecAction livenessExecAction = new V1ExecAction();
livenessExecAction.addCommandItem("/shared/domain/" + weblogicDomainName + "/nodemgr_home/livenessProbe.sh");
livenessExecAction.addCommandItem(weblogicDomainName);
livenessExecAction.addCommandItem(spec.getAsName());
livenessProbe.exec(livenessExecAction);
=======
V1ExecAction livenessAction = new V1ExecAction();
livenessAction.addCommandItem("/weblogic-operator/scripts/livenessProbe.sh");
livenessAction.addCommandItem(weblogicDomainName);
livenessAction.addCommandItem(spec.getAsName());
livenessProbe.exec(livenessAction);
>>>>>>>
V1ExecAction livenessAction = new V1ExecAction();
livenessAction.addCommandItem("/weblogic-operator/scripts/livenessProbe.sh");
livenessAction.addCommandItem(weblogicDomainName);
livenessAction.addCommandItem(spec.getAsName());
livenessProbe.exec(livenessAction);
<<<<<<<
container.addCommandItem("/shared/domain/" + weblogicDomainName + "/nodemgr_home/startServer.sh");
container.addCommandItem(weblogicDomainUID);
container.addCommandItem(weblogicServerName);
container.addCommandItem(spec.getAsName());
container.addCommandItem(String.valueOf(spec.getAsPort()));
=======
V1VolumeMount volumeMountScripts = new V1VolumeMount();
volumeMountScripts.setName("scripts");
volumeMountScripts.setMountPath("/weblogic-operator/scripts");
volumeMountScripts.setReadOnly(true);
container.addVolumeMountsItem(volumeMountScripts);
container.addCommandItem("/shared/domain/" + weblogicDomainName + "/servers/" + weblogicServerName + "/nodemgr_home/startServer.sh");
>>>>>>>
V1VolumeMount volumeMountScripts = new V1VolumeMount();
volumeMountScripts.setName("scripts");
volumeMountScripts.setMountPath("/weblogic-operator/scripts");
volumeMountScripts.setReadOnly(true);
container.addVolumeMountsItem(volumeMountScripts);
container.addCommandItem("/shared/domain/" + weblogicDomainName + "/nodemgr_home/startServer.sh");
container.addCommandItem(weblogicDomainUID);
container.addCommandItem(weblogicServerName);
container.addCommandItem(spec.getAsName());
container.addCommandItem(String.valueOf(spec.getAsPort()));
<<<<<<<
V1ExecAction execAction = new V1ExecAction();
execAction.addCommandItem("/shared/domain/" + weblogicDomainName + "/nodemgr_home/livenessProbe.sh");
execAction.addCommandItem(weblogicDomainName);
execAction.addCommandItem(weblogicServerName);
livenessProbe.exec(execAction);
=======
V1ExecAction livenessAction = new V1ExecAction();
livenessAction.addCommandItem("/weblogic-operator/scripts/livenessProbe.sh");
livenessAction.addCommandItem(weblogicDomainName);
livenessAction.addCommandItem(weblogicServerName);
livenessProbe.exec(livenessAction);
>>>>>>>
V1ExecAction livenessAction = new V1ExecAction();
livenessAction.addCommandItem("/weblogic-operator/scripts/livenessProbe.sh");
livenessAction.addCommandItem(weblogicDomainName);
livenessAction.addCommandItem(weblogicServerName);
livenessProbe.exec(livenessAction); |
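The merge above keeps the shared livenessProbe.sh under /weblogic-operator/scripts. A minimal sketch of that probe wiring using only the client model calls that appear in the record; the method name and the explicit V1Probe construction are assumptions:
static V1Probe buildLivenessProbe(String weblogicDomainName, String weblogicServerName) {
  V1ExecAction livenessAction = new V1ExecAction();
  livenessAction.addCommandItem("/weblogic-operator/scripts/livenessProbe.sh");
  livenessAction.addCommandItem(weblogicDomainName);
  livenessAction.addCommandItem(weblogicServerName);
  V1Probe livenessProbe = new V1Probe();
  livenessProbe.exec(livenessAction);  // the script's exit code signals server health
  return livenessProbe;
}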
<<<<<<<
import java.util.concurrent.atomic.AtomicInteger;
=======
import oracle.kubernetes.operator.Pair;
>>>>>>>
import java.util.concurrent.atomic.AtomicInteger;
import oracle.kubernetes.operator.Pair;
<<<<<<<
ServerHealth health = createServerHealthFromResult(result);
=======
Pair<String, ServerHealth> pair = parseServerHealthJson(jsonResult);
String state = pair.getLeft();
if (state != null && !state.isEmpty()) {
ConcurrentMap<String, String> serverStateMap =
(ConcurrentMap<String, String>) packet.get(SERVER_STATE_MAP);
info.updateLastKnownServerStatus(serverName, state);
serverStateMap.put(serverName, state);
}
>>>>>>>
Pair<String, ServerHealth> pair = createServerHealthFromResult(result);
String state = pair.getLeft();
if (state != null && !state.isEmpty()) {
ConcurrentMap<String, String> serverStateMap =
(ConcurrentMap<String, String>) packet.get(SERVER_STATE_MAP);
info.updateLastKnownServerStatus(serverName, state);
serverStateMap.put(serverName, state);
}
<<<<<<<
serverHealthMap.put((String) packet.get(ProcessingConstants.SERVER_NAME), health);
AtomicInteger remainingServersHealthToRead =
packet.getValue(ProcessingConstants.REMAINING_SERVERS_HEALTH_TO_READ);
remainingServersHealthToRead.getAndDecrement();
=======
serverHealthMap.put(
(String) packet.get(ProcessingConstants.SERVER_NAME), pair.getRight());
packet.put(ProcessingConstants.SERVER_HEALTH_READ, Boolean.TRUE);
>>>>>>>
serverHealthMap.put(
(String) packet.get(ProcessingConstants.SERVER_NAME), pair.getRight());
AtomicInteger remainingServersHealthToRead =
packet.getValue(ProcessingConstants.REMAINING_SERVERS_HEALTH_TO_READ);
remainingServersHealthToRead.getAndDecrement();
<<<<<<<
private ServerHealth createServerHealthFromResult(Result restResult) throws IOException {
if (restResult.isSuccessful()) {
return parseServerHealthJson(restResult.getResponse());
}
return new ServerHealth()
.withOverallHealth(
restResult.isServerOverloaded()
? OVERALL_HEALTH_FOR_SERVER_OVERLOADED
: OVERALL_HEALTH_NOT_AVAILABLE);
}
private ServerHealth parseServerHealthJson(String jsonResult) throws IOException {
=======
private Pair<String, ServerHealth> parseServerHealthJson(String jsonResult) throws IOException {
>>>>>>>
private Pair<String, ServerHealth> createServerHealthFromResult(Result restResult)
throws IOException {
if (restResult.isSuccessful()) {
return parseServerHealthJson(restResult.getResponse());
}
return new Pair<>(
"",
new ServerHealth()
.withOverallHealth(
restResult.isServerOverloaded()
? OVERALL_HEALTH_FOR_SERVER_OVERLOADED
: OVERALL_HEALTH_NOT_AVAILABLE));
}
private Pair<String, ServerHealth> parseServerHealthJson(String jsonResult) throws IOException { |
<<<<<<<
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import oracle.kubernetes.operator.utils.CreateDomainInputs;
import static oracle.kubernetes.operator.utils.CreateDomainInputs.*;
import oracle.kubernetes.operator.utils.ExecCreateDomain;
import oracle.kubernetes.operator.utils.ExecResult;
import static oracle.kubernetes.operator.utils.ExecResultMatcher.*;
import oracle.kubernetes.operator.utils.GeneratedDomainYamlFiles;
import oracle.kubernetes.operator.utils.UserProjects;
=======
import static oracle.kubernetes.operator.create.CreateDomainInputs.*;
import static oracle.kubernetes.operator.create.ExecResultMatcher.*;
>>>>>>>
import static oracle.kubernetes.operator.utils.CreateDomainInputs.*;
import static oracle.kubernetes.operator.utils.ExecResultMatcher.*; |
<<<<<<<
import oracle.kubernetes.operator.utils.CreateOperatorInputs;
import static oracle.kubernetes.operator.utils.KubernetesArtifactUtils.*;
=======
>>>>>>>
import oracle.kubernetes.operator.utils.CreateOperatorInputs; |
<<<<<<<
import io.kubernetes.client.util.Watch;
=======
import io.kubernetes.client.models.V1beta1Ingress;
import io.kubernetes.client.models.V1beta1IngressList;
>>>>>>>
import io.kubernetes.client.util.Watch;
import io.kubernetes.client.models.V1beta1Ingress;
import io.kubernetes.client.models.V1beta1IngressList;
<<<<<<<
private static void dispatchServiceWatch(Watch.Response<V1Service> item) {
V1Service s = item.object;
if (s != null) {
V1ObjectMeta metadata = s.getMetadata();
String domainUID = metadata.getLabels().get(LabelConstants.DOMAINUID_LABEL);
String serverName = metadata.getLabels().get(LabelConstants.SERVERNAME_LABEL);
String channelName = metadata.getLabels().get(LabelConstants.CHANNELNAME_LABEL);
String clusterName = metadata.getLabels().get(LabelConstants.CLUSTERNAME_LABEL);
if (domainUID != null) {
DomainPresenceInfo info = DomainPresenceInfoManager.lookup(domainUID);
ServerKubernetesObjects sko = null;
if (info != null) {
if (serverName != null) {
sko = ServerKubernetesObjectsManager.getOrCreate(info, domainUID, serverName);
}
switch (item.type) {
case "ADDED":
if (sko != null) {
if (channelName != null) {
sko.getChannels().put(channelName, s);
} else {
sko.getService().set(s);
}
} else if (clusterName != null) {
info.getClusters().put(clusterName, s);
}
break;
case "MODIFIED":
if (sko != null) {
if (channelName != null) {
V1Service skoService = sko.getChannels().get(channelName);
if (skoService != null) {
sko.getChannels().replace(channelName, skoService, s);
}
} else {
V1Service skoService = sko.getService().get();
if (skoService != null) {
sko.getService().compareAndSet(skoService, s);
}
}
} else if (clusterName != null) {
V1Service clusterService = info.getClusters().get(clusterName);
if (clusterService != null) {
info.getClusters().replace(clusterName, clusterService, s);
}
}
break;
case "DELETED":
if (sko != null) {
if (channelName != null) {
V1Service oldService = sko.getChannels().remove(channelName);
if (oldService != null) {
// Service was deleted, but sko still contained a non-null entry
LOGGER.info(
MessageKeys.SERVER_SERVICE_DELETED,
domainUID,
metadata.getNamespace(),
serverName);
doCheckAndCreateDomainPresence(info.getDomain(), true);
}
} else {
V1Service oldService = sko.getService().getAndSet(null);
if (oldService != null) {
// Service was deleted, but sko still contained a non-null entry
LOGGER.info(
MessageKeys.SERVER_SERVICE_DELETED,
domainUID,
metadata.getNamespace(),
serverName);
doCheckAndCreateDomainPresence(info.getDomain(), true);
}
}
} else if (clusterName != null) {
V1Service oldService = info.getClusters().remove(clusterName);
if (oldService != null) {
// Service was deleted, but clusters still contained a non-null entry
LOGGER.info(
MessageKeys.CLUSTER_SERVICE_DELETED,
domainUID,
metadata.getNamespace(),
clusterName);
doCheckAndCreateDomainPresence(info.getDomain(), true);
}
}
break;
case "ERROR":
default:
}
}
}
}
}
private static void dispatchConfigMapWatch(Watch.Response<V1ConfigMap> item) {
V1ConfigMap c = item.object;
if (c != null) {
switch (item.type) {
case "MODIFIED":
case "DELETED":
runSteps(
ConfigMapHelper.createScriptConfigMapStep(
getOperatorNamespace(), c.getMetadata().getNamespace()));
break;
case "ERROR":
default:
}
}
}
/**
* Dispatch the Domain event to the appropriate handler.
*
* @param item An item received from a Watch response.
*/
private static void dispatchDomainWatch(Watch.Response<Domain> item) {
Domain d;
String domainUID;
switch (item.type) {
case "ADDED":
case "MODIFIED":
d = item.object;
domainUID = d.getSpec().getDomainUID();
LOGGER.info(MessageKeys.WATCH_DOMAIN, domainUID);
doCheckAndCreateDomainPresence(d, true);
break;
case "DELETED":
d = item.object;
domainUID = d.getSpec().getDomainUID();
LOGGER.info(MessageKeys.WATCH_DOMAIN_DELETED, domainUID);
deleteDomainPresence(d);
break;
case "ERROR":
default:
}
}
private static String getOperatorNamespace() {
=======
private static IngressWatcher createIngressWatcher(String ns, String initialResourceVersion) {
return IngressWatcher.create(
getThreadFactory(),
ns,
initialResourceVersion,
DomainProcessor::dispatchIngressWatch,
isNamespaceStopping(ns));
}
static String getOperatorNamespace() {
>>>>>>>
private static void dispatchServiceWatch(Watch.Response<V1Service> item) {
V1Service s = item.object;
if (s != null) {
V1ObjectMeta metadata = s.getMetadata();
String domainUID = metadata.getLabels().get(LabelConstants.DOMAINUID_LABEL);
String serverName = metadata.getLabels().get(LabelConstants.SERVERNAME_LABEL);
String channelName = metadata.getLabels().get(LabelConstants.CHANNELNAME_LABEL);
String clusterName = metadata.getLabels().get(LabelConstants.CLUSTERNAME_LABEL);
if (domainUID != null) {
DomainPresenceInfo info = DomainPresenceInfoManager.lookup(domainUID);
ServerKubernetesObjects sko = null;
if (info != null) {
if (serverName != null) {
sko = ServerKubernetesObjectsManager.getOrCreate(info, domainUID, serverName);
}
switch (item.type) {
case "ADDED":
if (sko != null) {
if (channelName != null) {
sko.getChannels().put(channelName, s);
} else {
sko.getService().set(s);
}
} else if (clusterName != null) {
info.getClusters().put(clusterName, s);
}
break;
case "MODIFIED":
if (sko != null) {
if (channelName != null) {
V1Service skoService = sko.getChannels().get(channelName);
if (skoService != null) {
sko.getChannels().replace(channelName, skoService, s);
}
} else {
V1Service skoService = sko.getService().get();
if (skoService != null) {
sko.getService().compareAndSet(skoService, s);
}
}
} else if (clusterName != null) {
V1Service clusterService = info.getClusters().get(clusterName);
if (clusterService != null) {
info.getClusters().replace(clusterName, clusterService, s);
}
}
break;
case "DELETED":
if (sko != null) {
if (channelName != null) {
V1Service oldService = sko.getChannels().remove(channelName);
if (oldService != null) {
// Service was deleted, but sko still contained a non-null entry
LOGGER.info(
MessageKeys.SERVER_SERVICE_DELETED,
domainUID,
metadata.getNamespace(),
serverName);
doCheckAndCreateDomainPresence(info.getDomain(), true);
}
} else {
V1Service oldService = sko.getService().getAndSet(null);
if (oldService != null) {
// Service was deleted, but sko still contained a non-null entry
LOGGER.info(
MessageKeys.SERVER_SERVICE_DELETED,
domainUID,
metadata.getNamespace(),
serverName);
doCheckAndCreateDomainPresence(info.getDomain(), true);
}
}
} else if (clusterName != null) {
V1Service oldService = info.getClusters().remove(clusterName);
if (oldService != null) {
// Service was deleted, but clusters still contained a non-null entry
LOGGER.info(
MessageKeys.CLUSTER_SERVICE_DELETED,
domainUID,
metadata.getNamespace(),
clusterName);
doCheckAndCreateDomainPresence(info.getDomain(), true);
}
}
break;
case "ERROR":
default:
}
}
}
}
}
private static void dispatchConfigMapWatch(Watch.Response<V1ConfigMap> item) {
V1ConfigMap c = item.object;
if (c != null) {
switch (item.type) {
case "MODIFIED":
case "DELETED":
runSteps(
ConfigMapHelper.createScriptConfigMapStep(
getOperatorNamespace(), c.getMetadata().getNamespace()));
break;
case "ERROR":
default:
}
}
}
/**
* Dispatch the Domain event to the appropriate handler.
*
* @param item An item received from a Watch response.
*/
private static void dispatchDomainWatch(Watch.Response<Domain> item) {
Domain d;
String domainUID;
switch (item.type) {
case "ADDED":
case "MODIFIED":
d = item.object;
domainUID = d.getSpec().getDomainUID();
LOGGER.info(MessageKeys.WATCH_DOMAIN, domainUID);
doCheckAndCreateDomainPresence(d, true);
break;
case "DELETED":
d = item.object;
domainUID = d.getSpec().getDomainUID();
LOGGER.info(MessageKeys.WATCH_DOMAIN_DELETED, domainUID);
deleteDomainPresence(d);
break;
case "ERROR":
default:
}
}
static String getOperatorNamespace() { |
<<<<<<<
private static final String domain1ForDelValueYamlFile = "domain_del_1.yaml";
private static final String domain2ForDelValueYamlFile = "domain_del_2.yaml";
private static final String domain3ForDelValueYamlFile = "domain_del_3.yaml";
=======
private static String domain9YamlFile = "domain9.yaml";
private static String domain10YamlFile = "domain10.yaml";
>>>>>>>
private static final String domain1ForDelValueYamlFile = "domain_del_1.yaml";
private static final String domain2ForDelValueYamlFile = "domain_del_2.yaml";
private static final String domain3ForDelValueYamlFile = "domain_del_3.yaml";
private static String domain9YamlFile = "domain9.yaml";
private static String domain10YamlFile = "domain10.yaml"; |
<<<<<<<
import org.springframework.ide.vscode.commons.java.IJavaProject;
import org.springframework.ide.vscode.commons.util.BadLocationException;
=======
>>>>>>>
import org.springframework.ide.vscode.commons.java.IJavaProject; |
<<<<<<<
import org.springframework.ide.vscode.commons.boot.app.cli.requestmappings.RequestMapping;
=======
import org.springframework.ide.vscode.commons.boot.app.cli.livebean.LiveBeansModel;
>>>>>>>
import org.springframework.ide.vscode.commons.boot.app.cli.requestmappings.RequestMapping;
import org.springframework.ide.vscode.commons.boot.app.cli.livebean.LiveBeansModel; |
<<<<<<<
private void scanFiles(File directory) {
=======
private boolean containsCharacters(char[] symbolChars, char[] queryChars) {
int symbolindex = 0;
int queryindex = 0;
while (queryindex < queryChars.length && symbolindex < symbolChars.length) {
if (symbolChars[symbolindex] == queryChars[queryindex]) {
queryindex++;
}
symbolindex++;
}
return queryindex == queryChars.length;
}
private void scanFiles(WorkspaceFolder directory) {
>>>>>>>
private void scanFiles(WorkspaceFolder directory) { |
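The incoming side of this record (which the recorded resolution does not retain) matches symbol queries as in-order character subsequences. A self-contained restatement with example inputs; the method name here is illustrative:
// Query characters must appear in the symbol name in order, but not necessarily contiguously.
static boolean matchesInOrder(char[] symbolChars, char[] queryChars) {
  int symbolIndex = 0;
  int queryIndex = 0;
  while (queryIndex < queryChars.length && symbolIndex < symbolChars.length) {
    if (symbolChars[symbolIndex] == queryChars[queryIndex]) {
      queryIndex++;
    }
    symbolIndex++;
  }
  return queryIndex == queryChars.length;
}
// matchesInOrder("restController".toCharArray(), "rstCtrl".toCharArray()) -> true
// matchesInOrder("restController".toCharArray(), "xyz".toCharArray())     -> false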
<<<<<<<
import org.springframework.ide.vscode.commons.boot.app.cli.requestmappings.RequestMapping;
import org.springframework.ide.vscode.commons.boot.app.cli.requestmappings.RequestMappingImpl1;
=======
import org.springframework.ide.vscode.commons.boot.app.cli.livebean.LiveBeansModel;
>>>>>>>
import org.springframework.ide.vscode.commons.boot.app.cli.requestmappings.RequestMapping;
import org.springframework.ide.vscode.commons.boot.app.cli.requestmappings.RequestMappingImpl1;
import org.springframework.ide.vscode.commons.boot.app.cli.livebean.LiveBeansModel;
<<<<<<<
public static Collection<RequestMapping> parseRequestMappingsJson(String json) {
JSONObject obj = new JSONObject(json);
Iterator<String> keys = obj.keys();
List<RequestMapping> result = new ArrayList<>();
while (keys.hasNext()) {
String rawKey = keys.next();
JSONObject value = obj.getJSONObject(rawKey);
result.add(new RequestMappingImpl1(rawKey, value));
}
return result;
}
public Collection<RequestMapping> getRequestMappings() throws Exception {
=======
public LiveBeansModel getBeans() throws Exception {
String json = getBeansJson();
if (StringUtil.hasText(json)) {
return LiveBeansModel.parse(json);
}
return null;
}
public String getRequestMappings() throws Exception {
>>>>>>>
public LiveBeansModel getBeans() throws Exception {
String json = getBeansJson();
if (StringUtil.hasText(json)) {
return LiveBeansModel.parse(json);
}
return null;
}
public static Collection<RequestMapping> parseRequestMappingsJson(String json) {
JSONObject obj = new JSONObject(json);
Iterator<String> keys = obj.keys();
List<RequestMapping> result = new ArrayList<>();
while (keys.hasNext()) {
String rawKey = keys.next();
JSONObject value = obj.getJSONObject(rawKey);
result.add(new RequestMappingImpl1(rawKey, value));
}
return result;
}
public Collection<RequestMapping> getRequestMappings() throws Exception { |
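parseRequestMappingsJson above walks the top-level keys of the mappings response and wraps each entry. A hedged usage sketch; the JSON literal is a made-up placeholder shaped like Spring Boot 1.x actuator /mappings output:
String json = "{\"{[/hello],methods=[GET]}\": {\"bean\": \"requestMappingHandlerMapping\"}}";
Collection<RequestMapping> mappings = parseRequestMappingsJson(json);
// each RequestMappingImpl1 pairs the raw mapping key with its JSON attributes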
<<<<<<<
@Test
public void testSetBlobStoresNull() throws ConfigurationException{
expected.expect(NullPointerException.class);
expected.expectMessage("stores is null");
mediator.setBlobStores(null);
}
@Test
public void testSetBlobStoresWrapsStorageException() throws Exception{
when(xmlConfig.getBlobStores()).thenReturn(ImmutableList.<BlobStoreConfig>of());
CompositeBlobStore composite = mock(CompositeBlobStore.class);
doReturn(composite).when(mediator).getCompositeBlobStore();
StorageException se = new StorageException("expected");
doThrow(se).when(composite).setBlobStores(any(Iterable.class));
expected.expect(ConfigurationException.class);
expected.expectMessage("Error connecting to BlobStore");
mediator.setBlobStores(ImmutableList.<BlobStoreConfig>of());
}
@Test
public void testSetBlobStoresSavesConfig() throws Exception{
when(xmlConfig.getBlobStores()).thenReturn(ImmutableList.<BlobStoreConfig>of());
CompositeBlobStore composite = mock(CompositeBlobStore.class);
doReturn(composite).when(mediator).getCompositeBlobStore();
List<BlobStoreConfig> configList = Lists.newArrayList(mock(BlobStoreConfig.class), mock(BlobStoreConfig.class));
when(xmlConfig.getBlobStores()).thenReturn(configList);
BlobStoreConfig config = new FileBlobStoreConfig();
List<BlobStoreConfig> newStores = ImmutableList.<BlobStoreConfig>of(config);
mediator.setBlobStores(newStores);
verify(composite, times(1)).setBlobStores(same(newStores));
verify(xmlConfig, times(1)).save();
assertEquals(newStores, configList);
}
@Test
public void testSetBlobStoresRestoresRuntimeStoresOnSaveFailure() throws Exception{
when(xmlConfig.getBlobStores()).thenReturn(ImmutableList.<BlobStoreConfig>of());
CompositeBlobStore composite = mock(CompositeBlobStore.class);
doReturn(composite).when(mediator).getCompositeBlobStore();
doThrow(new IOException("expected")).when(xmlConfig).save();
List<BlobStoreConfig> oldStores = Lists.newArrayList(mock(BlobStoreConfig.class), mock(BlobStoreConfig.class));
when(xmlConfig.getBlobStores()).thenReturn(oldStores);
BlobStoreConfig config = new FileBlobStoreConfig();
List<BlobStoreConfig> newStores = ImmutableList.<BlobStoreConfig>of(config);
try {
mediator.setBlobStores(newStores);
fail("Expected ConfigurationException");
} catch (ConfigurationException e) {
assertTrue(e.getMessage().contains("Error saving config"));
}
verify(xmlConfig, times(1)).save();
verify(composite, times(1)).setBlobStores(same(newStores));
verify(composite, times(1)).setBlobStores(eq(oldStores));
}
=======
@Test
public void testGetDefaultAdvertisedCachedFormats() {
// from src/main/resources/org/geoserver/gwc/advertised_formats.properties
Set<String> defaultFormats = ImmutableSet.of("image/png", "image/png8", "image/jpeg",
"image/gif");
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.VECTOR));
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.REMOTE));
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.RASTER));
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.WMS));
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.GROUP));
}
@Test
public void testGetPluggabledAdvertisedCachedFormats() throws IOException {
List<URL> urls;
try {
// load the default and test resources separately so they are named differently and we
// don't get the ones for testing listed in the UI when running from eclipse
String defaultResource = "org/geoserver/gwc/advertised_formats.properties";
String testResource = "org/geoserver/gwc/advertised_formats_unittesting.properties";
ClassLoader classLoader = GWC.class.getClassLoader();
urls = newArrayList(forEnumeration(classLoader.getResources(defaultResource)));
urls.addAll(newArrayList(forEnumeration(classLoader.getResources(testResource))));
} catch (IOException e) {
throw Throwables.propagate(e);
}
// from src/main/resources/org/geoserver/gwc/advertised_formats.properties
Set<String> defaultFormats = ImmutableSet.of("image/png", "image/png8", "image/jpeg",
"image/gif");
// see src/test/resources/org/geoserver/gwc/advertised_formats.properties
Set<String> expectedVector = union(defaultFormats,
ImmutableSet.of("test/vector1", "test/vector2"));
Set<String> expectedRaster = union(defaultFormats,
ImmutableSet.of("test/raster1", "test/raster2;type=test"));
Set<String> expectedGroup = union(defaultFormats,
ImmutableSet.of("test/group1", "test/group2"));
assertEquals(expectedVector, GWC.getAdvertisedCachedFormats(PublishedType.VECTOR, urls));
assertEquals(expectedVector, GWC.getAdvertisedCachedFormats(PublishedType.REMOTE, urls));
assertEquals(expectedRaster, GWC.getAdvertisedCachedFormats(PublishedType.RASTER, urls));
assertEquals(expectedRaster, GWC.getAdvertisedCachedFormats(PublishedType.WMS, urls));
assertEquals(expectedGroup, GWC.getAdvertisedCachedFormats(PublishedType.GROUP, urls));
}
>>>>>>>
@Test
public void testGetDefaultAdvertisedCachedFormats() {
// from src/main/resources/org/geoserver/gwc/advertised_formats.properties
Set<String> defaultFormats = ImmutableSet.of("image/png", "image/png8", "image/jpeg",
"image/gif");
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.VECTOR));
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.REMOTE));
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.RASTER));
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.WMS));
assertEquals(defaultFormats, GWC.getAdvertisedCachedFormats(PublishedType.GROUP));
}
@Test
public void testGetPluggabledAdvertisedCachedFormats() throws IOException {
List<URL> urls;
try {
// load the default and test resources separately so they are named differently and we
// don't get the ones for testing listed in the UI when running from eclipse
String defaultResource = "org/geoserver/gwc/advertised_formats.properties";
String testResource = "org/geoserver/gwc/advertised_formats_unittesting.properties";
ClassLoader classLoader = GWC.class.getClassLoader();
urls = newArrayList(forEnumeration(classLoader.getResources(defaultResource)));
urls.addAll(newArrayList(forEnumeration(classLoader.getResources(testResource))));
} catch (IOException e) {
throw Throwables.propagate(e);
}
// from src/main/resources/org/geoserver/gwc/advertised_formats.properties
Set<String> defaultFormats = ImmutableSet.of("image/png", "image/png8", "image/jpeg",
"image/gif");
// see src/test/resources/org/geoserver/gwc/advertised_formats.properties
Set<String> expectedVector = union(defaultFormats,
ImmutableSet.of("test/vector1", "test/vector2"));
Set<String> expectedRaster = union(defaultFormats,
ImmutableSet.of("test/raster1", "test/raster2;type=test"));
Set<String> expectedGroup = union(defaultFormats,
ImmutableSet.of("test/group1", "test/group2"));
assertEquals(expectedVector, GWC.getAdvertisedCachedFormats(PublishedType.VECTOR, urls));
assertEquals(expectedVector, GWC.getAdvertisedCachedFormats(PublishedType.REMOTE, urls));
assertEquals(expectedRaster, GWC.getAdvertisedCachedFormats(PublishedType.RASTER, urls));
assertEquals(expectedRaster, GWC.getAdvertisedCachedFormats(PublishedType.WMS, urls));
assertEquals(expectedGroup, GWC.getAdvertisedCachedFormats(PublishedType.GROUP, urls));
}
@Test
public void testSetBlobStoresNull() throws ConfigurationException{
expected.expect(NullPointerException.class);
expected.expectMessage("stores is null");
mediator.setBlobStores(null);
}
@Test
public void testSetBlobStoresWrapsStorageException() throws Exception{
when(xmlConfig.getBlobStores()).thenReturn(ImmutableList.<BlobStoreConfig>of());
CompositeBlobStore composite = mock(CompositeBlobStore.class);
doReturn(composite).when(mediator).getCompositeBlobStore();
StorageException se = new StorageException("expected");
doThrow(se).when(composite).setBlobStores(any(Iterable.class));
expected.expect(ConfigurationException.class);
expected.expectMessage("Error connecting to BlobStore");
mediator.setBlobStores(ImmutableList.<BlobStoreConfig>of());
}
@Test
public void testSetBlobStoresSavesConfig() throws Exception{
when(xmlConfig.getBlobStores()).thenReturn(ImmutableList.<BlobStoreConfig>of());
CompositeBlobStore composite = mock(CompositeBlobStore.class);
doReturn(composite).when(mediator).getCompositeBlobStore();
List<BlobStoreConfig> configList = Lists.newArrayList(mock(BlobStoreConfig.class), mock(BlobStoreConfig.class));
when(xmlConfig.getBlobStores()).thenReturn(configList);
BlobStoreConfig config = new FileBlobStoreConfig();
List<BlobStoreConfig> newStores = ImmutableList.<BlobStoreConfig>of(config);
mediator.setBlobStores(newStores);
verify(composite, times(1)).setBlobStores(same(newStores));
verify(xmlConfig, times(1)).save();
assertEquals(newStores, configList);
}
@Test
public void testSetBlobStoresRestoresRuntimeStoresOnSaveFailure() throws Exception{
when(xmlConfig.getBlobStores()).thenReturn(ImmutableList.<BlobStoreConfig>of());
CompositeBlobStore composite = mock(CompositeBlobStore.class);
doReturn(composite).when(mediator).getCompositeBlobStore();
doThrow(new IOException("expected")).when(xmlConfig).save();
List<BlobStoreConfig> oldStores = Lists.newArrayList(mock(BlobStoreConfig.class), mock(BlobStoreConfig.class));
when(xmlConfig.getBlobStores()).thenReturn(oldStores);
BlobStoreConfig config = new FileBlobStoreConfig();
List<BlobStoreConfig> newStores = ImmutableList.<BlobStoreConfig>of(config);
try {
mediator.setBlobStores(newStores);
fail("Expected ConfigurationException");
} catch (ConfigurationException e) {
assertTrue(e.getMessage().contains("Error saving config"));
}
verify(xmlConfig, times(1)).save();
verify(composite, times(1)).setBlobStores(same(newStores));
verify(composite, times(1)).setBlobStores(eq(oldStores));
} |
<<<<<<<
=======
@Override
public boolean isFeaturesReprojectionDisabled() {
return featuresReprojectionDisabled;
}
@Override
public void setFeaturesReprojectionDisabled(boolean featuresReprojectionDisabled) {
this.featuresReprojectionDisabled = featuresReprojectionDisabled;
}
>>>>>>>
@Override
public boolean isFeaturesReprojectionDisabled() {
return featuresReprojectionDisabled;
}
@Override
public void setFeaturesReprojectionDisabled(boolean featuresReprojectionDisabled) {
this.featuresReprojectionDisabled = featuresReprojectionDisabled;
} |
<<<<<<<
import org.apache.wicket.Component;
=======
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import org.apache.wicket.PageParameters;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.OnChangeAjaxBehavior;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.DropDownChoice;
>>>>>>>
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.OnChangeAjaxBehavior;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.DropDownChoice;
<<<<<<<
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.util.visit.IVisitor;
=======
import org.apache.wicket.validation.IValidatable;
import org.apache.wicket.validation.validator.AbstractValidator;
import org.geoserver.catalog.Catalog;
import org.geoserver.catalog.CatalogBuilder;
>>>>>>>
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.validation.IValidatable;
import org.geoserver.catalog.Catalog;
import org.geoserver.catalog.CatalogBuilder; |
<<<<<<<
import java.util.Comparator;
=======
import java.util.HashMap;
>>>>>>>
import java.util.HashMap;
import java.util.Comparator;
<<<<<<<
protected CatalogFacade catalogFacade;
protected int offset;
protected int count;
protected SortBy[] sortOrder;
protected Filter filter;
protected int index;
protected Comparator<Info> comparator;
public CatalogStoreFeatureIterator(int offset, int count, SortBy[] sortOrder, Filter filter, Catalog catalog, CatalogStoreMapping mapping, RecordDescriptor recordDescriptor) {
this.offset = offset;
this.count = count;
this.sortOrder = sortOrder;
this.filter = filter;
catalogFacade = catalog.getFacade();
=======
protected Map<String, String> interpolationProperties = new HashMap<String, String>();
public CatalogStoreFeatureIterator(int offset, int count, SortBy[] sortOrder, Filter filter, Catalog catalog, CatalogStoreMapping mapping, RecordDescriptor recordDescriptor, Map<String, String> interpolationProperties) {
this.interpolationProperties = interpolationProperties;
CatalogFacade catalogFacade = catalog.getFacade();
layerIt = catalogFacade.list(ResourceInfo.class, filter, offset, count, sortOrder);
>>>>>>>
protected CatalogFacade catalogFacade;
protected Map<String, String> interpolationProperties = new HashMap<String, String>();
protected int offset;
protected int count;
protected SortBy[] sortOrder;
protected Filter filter;
protected int index;
protected Comparator<Info> comparator;
public CatalogStoreFeatureIterator(int offset, int count, SortBy[] sortOrder, Filter filter, Catalog catalog, CatalogStoreMapping mapping, RecordDescriptor recordDescriptor, Map<String, String> interpolationProperties) {
this.interpolationProperties = interpolationProperties;
this.offset = offset;
this.count = count;
this.sortOrder = sortOrder;
this.filter = filter;
catalogFacade = catalog.getFacade(); |